// Copyright 2015-2025 The Khronos Group Inc.
//
// SPDX-License-Identifier: Apache-2.0 OR MIT
//
// This header is generated from the Khronos Vulkan XML API Registry.
#ifndef VULKAN_STRUCTS_HPP
# define VULKAN_STRUCTS_HPP
// include-what-you-use: make sure vulkan.hpp is used by code-completers
// IWYU pragma: private; include "vulkan.hpp"
#include <cstring> // strcmp
namespace VULKAN_HPP_NAMESPACE
{
//===============
//=== STRUCTS ===
//===============
// wrapper struct for struct VkAabbPositionsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAabbPositionsKHR.html
struct AabbPositionsKHR
{
using NativeType = VkAabbPositionsKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR AabbPositionsKHR(float minX_ = {}, float minY_ = {}, float minZ_ = {}, float maxX_ = {}, float maxY_ = {}, float maxZ_ = {}) VULKAN_HPP_NOEXCEPT
: minX{ minX_ }, minY{ minY_ }, minZ{ minZ_ }, maxX{ maxX_ }, maxY{ maxY_ }, maxZ{ maxZ_ }
{}
VULKAN_HPP_CONSTEXPR AabbPositionsKHR( AabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AabbPositionsKHR( VkAabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: AabbPositionsKHR( *reinterpret_cast<AabbPositionsKHR const *>( &rhs ) )
{}
AabbPositionsKHR & operator=( AabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
AabbPositionsKHR & operator=( VkAabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AabbPositionsKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & setMinX( float minX_ ) VULKAN_HPP_NOEXCEPT
{
minX = minX_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & setMinY( float minY_ ) VULKAN_HPP_NOEXCEPT
{
minY = minY_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & setMinZ( float minZ_ ) VULKAN_HPP_NOEXCEPT
{
minZ = minZ_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & setMaxX( float maxX_ ) VULKAN_HPP_NOEXCEPT
{
maxX = maxX_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & setMaxY( float maxY_ ) VULKAN_HPP_NOEXCEPT
{
maxY = maxY_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & setMaxZ( float maxZ_ ) VULKAN_HPP_NOEXCEPT
{
maxZ = maxZ_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkAabbPositionsKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAabbPositionsKHR*>( this );
}
operator VkAabbPositionsKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAabbPositionsKHR*>( this );
}
operator VkAabbPositionsKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkAabbPositionsKHR*>( this );
}
operator VkAabbPositionsKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkAabbPositionsKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<float const &, float const &, float const &, float const &, float const &, float const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( minX, minY, minZ, maxX, maxY, maxZ );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( AabbPositionsKHR const & ) const = default;
#else
bool operator==( AabbPositionsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( minX == rhs.minX )
&& ( minY == rhs.minY )
&& ( minZ == rhs.minZ )
&& ( maxX == rhs.maxX )
&& ( maxY == rhs.maxY )
&& ( maxZ == rhs.maxZ );
#endif
}
bool operator!=( AabbPositionsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
float minX = {};
float minY = {};
float minZ = {};
float maxX = {};
float maxY = {};
float maxZ = {};
};
using AabbPositionsNV = AabbPositionsKHR;
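// Illustrative sketch, not part of the generated header: the AabbPositionsKHR
// setters return *this, so an AABB can be filled in one fluent expression
// ("vk" is the default VULKAN_HPP_NAMESPACE):
//
//   vk::AabbPositionsKHR aabb = vk::AabbPositionsKHR{}
//                                 .setMinX( -1.0f ).setMinY( -1.0f ).setMinZ( -1.0f )
//                                 .setMaxX( 1.0f ).setMaxY( 1.0f ).setMaxZ( 1.0f );
//   VkAabbPositionsKHR native = aabb;  // implicit conversion to the C struct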
union DeviceOrHostAddressConstKHR
{
using NativeType = VkDeviceOrHostAddressConstKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstKHR( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} )
: deviceAddress( deviceAddress_ )
{}
VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstKHR( const void * hostAddress_ )
: hostAddress( hostAddress_ )
{}
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstKHR & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
{
deviceAddress = deviceAddress_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstKHR & setHostAddress( const void * hostAddress_ ) VULKAN_HPP_NOEXCEPT
{
hostAddress = hostAddress_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDeviceOrHostAddressConstKHR const &() const
{
return *reinterpret_cast<const VkDeviceOrHostAddressConstKHR*>( this );
}
operator VkDeviceOrHostAddressConstKHR &()
{
return *reinterpret_cast<VkDeviceOrHostAddressConstKHR*>( this );
}
#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress;
const void * hostAddress;
#else
VkDeviceAddress deviceAddress;
const void * hostAddress;
#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
};
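// Illustrative sketch, not part of the generated header: as with the
// underlying C union, only one member of DeviceOrHostAddressConstKHR may be
// active at a time; which one is read depends on whether the consuming command
// performs a device or a host build. vertexBufferAddress and hostVertices are
// assumptions:
//
//   vk::DeviceOrHostAddressConstKHR addr{};
//   addr.setDeviceAddress( vertexBufferAddress );      // device builds
//   // or: addr.setHostAddress( hostVertices.data() ); // host builds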
// wrapper struct for struct VkAccelerationStructureGeometryTrianglesDataKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureGeometryTrianglesDataKHR.html
struct AccelerationStructureGeometryTrianglesDataKHR
{
using NativeType = VkAccelerationStructureGeometryTrianglesDataKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryTrianglesDataKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR(
  VULKAN_HPP_NAMESPACE::Format vertexFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
  VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData_ = {},
  VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ = {},
  uint32_t maxVertex_ = {},
  VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16,
  VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexData_ = {},
  VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR transformData_ = {},
  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
  : pNext{ pNext_ }
  , vertexFormat{ vertexFormat_ }
  , vertexData{ vertexData_ }
  , vertexStride{ vertexStride_ }
  , maxVertex{ maxVertex_ }
  , indexType{ indexType_ }
  , indexData{ indexData_ }
  , transformData{ transformData_ }
{}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR( AccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AccelerationStructureGeometryTrianglesDataKHR( VkAccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: AccelerationStructureGeometryTrianglesDataKHR( *reinterpret_cast<AccelerationStructureGeometryTrianglesDataKHR const *>( &rhs ) )
{}
AccelerationStructureGeometryTrianglesDataKHR & operator=( AccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
AccelerationStructureGeometryTrianglesDataKHR & operator=( VkAccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & setVertexFormat( VULKAN_HPP_NAMESPACE::Format vertexFormat_ ) VULKAN_HPP_NOEXCEPT
{
vertexFormat = vertexFormat_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & setVertexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & vertexData_ ) VULKAN_HPP_NOEXCEPT
{
vertexData = vertexData_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & setVertexStride( VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ ) VULKAN_HPP_NOEXCEPT
{
vertexStride = vertexStride_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & setMaxVertex( uint32_t maxVertex_ ) VULKAN_HPP_NOEXCEPT
{
maxVertex = maxVertex_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT
{
indexType = indexType_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & setIndexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & indexData_ ) VULKAN_HPP_NOEXCEPT
{
indexData = indexData_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & setTransformData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & transformData_ ) VULKAN_HPP_NOEXCEPT
{
transformData = transformData_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkAccelerationStructureGeometryTrianglesDataKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAccelerationStructureGeometryTrianglesDataKHR*>( this );
}
operator VkAccelerationStructureGeometryTrianglesDataKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAccelerationStructureGeometryTrianglesDataKHR*>( this );
}
operator VkAccelerationStructureGeometryTrianglesDataKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkAccelerationStructureGeometryTrianglesDataKHR*>( this );
}
operator VkAccelerationStructureGeometryTrianglesDataKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkAccelerationStructureGeometryTrianglesDataKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, uint32_t const &, VULKAN_HPP_NAMESPACE::IndexType const &, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const &, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, vertexFormat, vertexData, vertexStride, maxVertex, indexType, indexData, transformData );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryTrianglesDataKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Format vertexFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData = {};
VULKAN_HPP_NAMESPACE::DeviceSize vertexStride = {};
uint32_t maxVertex = {};
VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16;
VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexData = {};
VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR transformData = {};
};
template <>
struct CppType<StructureType, StructureType::eAccelerationStructureGeometryTrianglesDataKHR>
{
using Type = AccelerationStructureGeometryTrianglesDataKHR;
};
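// Illustrative sketch, not part of the generated header: typical triangle
// geometry for a bottom-level build; vertexBufferAddress, indexBufferAddress
// and vertexCount are assumptions (buffer addresses would come from
// vkGetBufferDeviceAddress):
//
//   vk::AccelerationStructureGeometryTrianglesDataKHR triangles{};
//   triangles.setVertexFormat( vk::Format::eR32G32B32Sfloat )
//            .setVertexData( vertexBufferAddress )
//            .setVertexStride( 3 * sizeof( float ) )
//            .setMaxVertex( vertexCount - 1 )
//            .setIndexType( vk::IndexType::eUint32 )
//            .setIndexData( indexBufferAddress );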
// wrapper struct for struct VkAccelerationStructureGeometryAabbsDataKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureGeometryAabbsDataKHR.html
struct AccelerationStructureGeometryAabbsDataKHR
{
using NativeType = VkAccelerationStructureGeometryAabbsDataKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryAabbsDataKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryAabbsDataKHR(
  VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data_ = {},
  VULKAN_HPP_NAMESPACE::DeviceSize stride_ = {},
  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
  : pNext{ pNext_ }, data{ data_ }, stride{ stride_ }
{}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryAabbsDataKHR( AccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AccelerationStructureGeometryAabbsDataKHR( VkAccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: AccelerationStructureGeometryAabbsDataKHR( *reinterpret_cast<AccelerationStructureGeometryAabbsDataKHR const *>( &rhs ) )
{}
AccelerationStructureGeometryAabbsDataKHR & operator=( AccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
AccelerationStructureGeometryAabbsDataKHR & operator=( VkAccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryAabbsDataKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryAabbsDataKHR & setData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & data_ ) VULKAN_HPP_NOEXCEPT
{
data = data_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryAabbsDataKHR & setStride( VULKAN_HPP_NAMESPACE::DeviceSize stride_ ) VULKAN_HPP_NOEXCEPT
{
stride = stride_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkAccelerationStructureGeometryAabbsDataKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAccelerationStructureGeometryAabbsDataKHR*>( this );
}
operator VkAccelerationStructureGeometryAabbsDataKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAccelerationStructureGeometryAabbsDataKHR*>( this );
}
operator VkAccelerationStructureGeometryAabbsDataKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkAccelerationStructureGeometryAabbsDataKHR*>( this );
}
operator VkAccelerationStructureGeometryAabbsDataKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkAccelerationStructureGeometryAabbsDataKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, data, stride );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryAabbsDataKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data = {};
VULKAN_HPP_NAMESPACE::DeviceSize stride = {};
};
template <>
struct CppType<StructureType, StructureType::eAccelerationStructureGeometryAabbsDataKHR>
{
using Type = AccelerationStructureGeometryAabbsDataKHR;
};
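// Illustrative sketch, not part of the generated header: data points at an
// array of VkAabbPositionsKHR, spaced stride bytes apart; aabbBufferAddress is
// an assumption:
//
//   vk::AccelerationStructureGeometryAabbsDataKHR aabbsData{};
//   aabbsData.setData( aabbBufferAddress )
//            .setStride( sizeof( VkAabbPositionsKHR ) );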
// wrapper struct for struct VkAccelerationStructureGeometryInstancesDataKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureGeometryInstancesDataKHR.html
struct AccelerationStructureGeometryInstancesDataKHR
{
using NativeType = VkAccelerationStructureGeometryInstancesDataKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryInstancesDataKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryInstancesDataKHR(
  VULKAN_HPP_NAMESPACE::Bool32 arrayOfPointers_ = {},
  VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data_ = {},
  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
  : pNext{ pNext_ }, arrayOfPointers{ arrayOfPointers_ }, data{ data_ }
{}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryInstancesDataKHR( AccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AccelerationStructureGeometryInstancesDataKHR( VkAccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: AccelerationStructureGeometryInstancesDataKHR( *reinterpret_cast<AccelerationStructureGeometryInstancesDataKHR const *>( &rhs ) )
{}
AccelerationStructureGeometryInstancesDataKHR & operator=( AccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
AccelerationStructureGeometryInstancesDataKHR & operator=( VkAccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryInstancesDataKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryInstancesDataKHR & setArrayOfPointers( VULKAN_HPP_NAMESPACE::Bool32 arrayOfPointers_ ) VULKAN_HPP_NOEXCEPT
{
arrayOfPointers = arrayOfPointers_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryInstancesDataKHR & setData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & data_ ) VULKAN_HPP_NOEXCEPT
{
data = data_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkAccelerationStructureGeometryInstancesDataKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAccelerationStructureGeometryInstancesDataKHR*>( this );
}
operator VkAccelerationStructureGeometryInstancesDataKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAccelerationStructureGeometryInstancesDataKHR*>( this );
}
operator VkAccelerationStructureGeometryInstancesDataKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkAccelerationStructureGeometryInstancesDataKHR*>( this );
}
operator VkAccelerationStructureGeometryInstancesDataKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkAccelerationStructureGeometryInstancesDataKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, arrayOfPointers, data );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryInstancesDataKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 arrayOfPointers = {};
VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data = {};
};
template <>
struct CppType<StructureType, StructureType::eAccelerationStructureGeometryInstancesDataKHR>
{
using Type = AccelerationStructureGeometryInstancesDataKHR;
};
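// Illustrative sketch, not part of the generated header: for a top-level
// build, data points at an array of VkAccelerationStructureInstanceKHR, or at
// an array of pointers to instances when arrayOfPointers is true;
// instanceBufferAddress is an assumption:
//
//   vk::AccelerationStructureGeometryInstancesDataKHR instances{};
//   instances.setArrayOfPointers( VK_FALSE )
//            .setData( instanceBufferAddress );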
union AccelerationStructureGeometryDataKHR
{
using NativeType = VkAccelerationStructureGeometryDataKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR triangles_ = {} )
: triangles( triangles_ )
{}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR aabbs_ )
: aabbs( aabbs_ )
{}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR instances_ )
: instances( instances_ )
{}
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS )
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR & setTriangles( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR const & triangles_ ) VULKAN_HPP_NOEXCEPT
{
triangles = triangles_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR & setAabbs( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR const & aabbs_ ) VULKAN_HPP_NOEXCEPT
{
aabbs = aabbs_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR & setInstances( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR const & instances_ ) VULKAN_HPP_NOEXCEPT
{
instances = instances_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkAccelerationStructureGeometryDataKHR const &() const
{
return *reinterpret_cast<const VkAccelerationStructureGeometryDataKHR*>( this );
}
operator VkAccelerationStructureGeometryDataKHR &()
{
return *reinterpret_cast<VkAccelerationStructureGeometryDataKHR*>( this );
}
#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR triangles;
VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR aabbs;
VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR instances;
#else
VkAccelerationStructureGeometryTrianglesDataKHR triangles;
VkAccelerationStructureGeometryAabbsDataKHR aabbs;
VkAccelerationStructureGeometryInstancesDataKHR instances;
#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
};
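// Illustrative sketch, not part of the generated header: the active member of
// this union must match the geometryType of the enclosing
// AccelerationStructureGeometryKHR; reusing the triangles data sketched above:
//
//   vk::AccelerationStructureGeometryDataKHR geometryData{};
//   geometryData.setTriangles( triangles );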
// wrapper struct for struct VkAccelerationStructureGeometryKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureGeometryKHR.html
struct AccelerationStructureGeometryKHR
{
using NativeType = VkAccelerationStructureGeometryKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR(
  VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles,
  VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR geometry_ = {},
  VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ = {},
  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
  : pNext{ pNext_ }, geometryType{ geometryType_ }, geometry{ geometry_ }, flags{ flags_ }
{}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR( AccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AccelerationStructureGeometryKHR( VkAccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: AccelerationStructureGeometryKHR( *reinterpret_cast<AccelerationStructureGeometryKHR const *>( &rhs ) )
{}
AccelerationStructureGeometryKHR & operator=( AccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
AccelerationStructureGeometryKHR & operator=( VkAccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR & setGeometryType( VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ ) VULKAN_HPP_NOEXCEPT
{
geometryType = geometryType_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR & setGeometry( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR const & geometry_ ) VULKAN_HPP_NOEXCEPT
{
geometry = geometry_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR & setFlags( VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkAccelerationStructureGeometryKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAccelerationStructureGeometryKHR*>( this );
}
operator VkAccelerationStructureGeometryKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAccelerationStructureGeometryKHR*>( this );
}
operator VkAccelerationStructureGeometryKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkAccelerationStructureGeometryKHR*>( this );
}
operator VkAccelerationStructureGeometryKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkAccelerationStructureGeometryKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::GeometryTypeKHR const &, VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR const &, VULKAN_HPP_NAMESPACE::GeometryFlagsKHR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, geometryType, geometry, flags );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles;
VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR geometry = {};
VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags = {};
};
template <>
struct CppType<StructureType, StructureType::eAccelerationStructureGeometryKHR>
{
using Type = AccelerationStructureGeometryKHR;
};
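// Illustrative sketch, not part of the generated header: ties a geometry type
// to the matching union member (the union also converts implicitly from its
// member types), continuing the sketches above:
//
//   vk::AccelerationStructureGeometryKHR geometry{};
//   geometry.setGeometryType( vk::GeometryTypeKHR::eTriangles )
//           .setGeometry( geometryData )
//           .setFlags( vk::GeometryFlagBitsKHR::eOpaque );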
union DeviceOrHostAddressKHR
{
using NativeType = VkDeviceOrHostAddressKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressKHR( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} )
: deviceAddress( deviceAddress_ )
{}
VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressKHR( void * hostAddress_ )
: hostAddress( hostAddress_ )
{}
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressKHR & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
{
deviceAddress = deviceAddress_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressKHR & setHostAddress( void * hostAddress_ ) VULKAN_HPP_NOEXCEPT
{
hostAddress = hostAddress_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDeviceOrHostAddressKHR const &() const
{
return *reinterpret_cast<const VkDeviceOrHostAddressKHR*>( this );
}
operator VkDeviceOrHostAddressKHR &()
{
return *reinterpret_cast<VkDeviceOrHostAddressKHR*>( this );
}
#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress;
void * hostAddress;
#else
VkDeviceAddress deviceAddress;
void * hostAddress;
#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
};
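// Illustrative sketch, not part of the generated header: the mutable
// counterpart of DeviceOrHostAddressConstKHR, used e.g. for the scratch memory
// of a build; scratchBufferAddress is an assumption:
//
//   vk::DeviceOrHostAddressKHR scratch{};
//   scratch.setDeviceAddress( scratchBufferAddress );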
// wrapper struct for struct VkAccelerationStructureBuildGeometryInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureBuildGeometryInfoKHR.html
struct AccelerationStructureBuildGeometryInfoKHR
{
using NativeType = VkAccelerationStructureBuildGeometryInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureBuildGeometryInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR(
  VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel,
  VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ = {},
  VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR mode_ = VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR::eBuild,
  VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure_ = {},
  VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure_ = {},
  uint32_t geometryCount_ = {},
  const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * pGeometries_ = {},
  const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const * ppGeometries_ = {},
  VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData_ = {},
  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
  : pNext{ pNext_ }
  , type{ type_ }
  , flags{ flags_ }
  , mode{ mode_ }
  , srcAccelerationStructure{ srcAccelerationStructure_ }
  , dstAccelerationStructure{ dstAccelerationStructure_ }
  , geometryCount{ geometryCount_ }
  , pGeometries{ pGeometries_ }
  , ppGeometries{ ppGeometries_ }
  , scratchData{ scratchData_ }
{}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR( AccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AccelerationStructureBuildGeometryInfoKHR( VkAccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: AccelerationStructureBuildGeometryInfoKHR( *reinterpret_cast<AccelerationStructureBuildGeometryInfoKHR const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
AccelerationStructureBuildGeometryInfoKHR(
  VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_,
  VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_,
  VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR mode_,
  VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure_,
  VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure_,
  VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR> const & geometries_,
  VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const> const & pGeometries_ = {},
  VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData_ = {},
  const void * pNext_ = nullptr )
  : pNext( pNext_ )
  , type( type_ )
  , flags( flags_ )
  , mode( mode_ )
  , srcAccelerationStructure( srcAccelerationStructure_ )
  , dstAccelerationStructure( dstAccelerationStructure_ )
  , geometryCount( static_cast<uint32_t>( !geometries_.empty() ? geometries_.size() : pGeometries_.size() ) )
  , pGeometries( geometries_.data() )
  , ppGeometries( pGeometries_.data() )
  , scratchData( scratchData_ )
{
#ifdef VULKAN_HPP_NO_EXCEPTIONS
VULKAN_HPP_ASSERT( ( !geometries_.empty() + !pGeometries_.empty() ) <= 1 );
#else
if ( 1 < ( !geometries_.empty() + !pGeometries_.empty() ) )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::AccelerationStructureBuildGeometryInfoKHR::AccelerationStructureBuildGeometryInfoKHR: 1 < ( !geometries_.empty() + !pGeometries_.empty() )" );
}
#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
AccelerationStructureBuildGeometryInfoKHR & operator=( AccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
AccelerationStructureBuildGeometryInfoKHR & operator=( VkAccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setType( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ ) VULKAN_HPP_NOEXCEPT
{
type = type_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setMode( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT
{
mode = mode_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setSrcAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT
{
srcAccelerationStructure = srcAccelerationStructure_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setDstAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT
{
dstAccelerationStructure = dstAccelerationStructure_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setGeometryCount( uint32_t geometryCount_ ) VULKAN_HPP_NOEXCEPT
{
geometryCount = geometryCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setPGeometries( const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * pGeometries_ ) VULKAN_HPP_NOEXCEPT
{
pGeometries = pGeometries_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
AccelerationStructureBuildGeometryInfoKHR & setGeometries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR> const & geometries_ ) VULKAN_HPP_NOEXCEPT
{
geometryCount = static_cast<uint32_t>( geometries_.size() );
pGeometries = geometries_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setPpGeometries( const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const * ppGeometries_ ) VULKAN_HPP_NOEXCEPT
{
ppGeometries = ppGeometries_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
AccelerationStructureBuildGeometryInfoKHR & setPGeometries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const> const & pGeometries_ ) VULKAN_HPP_NOEXCEPT
{
geometryCount = static_cast<uint32_t>( pGeometries_.size() );
ppGeometries = pGeometries_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setScratchData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & scratchData_ ) VULKAN_HPP_NOEXCEPT
{
scratchData = scratchData_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkAccelerationStructureBuildGeometryInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR*>( this );
}
operator VkAccelerationStructureBuildGeometryInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAccelerationStructureBuildGeometryInfoKHR*>( this );
}
operator VkAccelerationStructureBuildGeometryInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR*>( this );
}
operator VkAccelerationStructureBuildGeometryInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkAccelerationStructureBuildGeometryInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR const &, VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR const &, VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR const &, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR const &, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const &, const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const * const &, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, type, flags, mode, srcAccelerationStructure, dstAccelerationStructure, geometryCount, pGeometries, ppGeometries, scratchData );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureBuildGeometryInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel;
VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags = {};
VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR mode = VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR::eBuild;
VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure = {};
VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure = {};
uint32_t geometryCount = {};
const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * pGeometries = {};
const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const * ppGeometries = {};
VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData = {};
};
template <>
struct CppType<StructureType, StructureType::eAccelerationStructureBuildGeometryInfoKHR>
{
using Type = AccelerationStructureBuildGeometryInfoKHR;
};
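// Illustrative sketch, not part of the generated header: in enhanced mode the
// ArrayProxy setter fills geometryCount and pGeometries together; at most one
// of geometries / pGeometries may be non-empty, as the throwing constructor
// above enforces. Continuing the sketches above:
//
//   vk::AccelerationStructureBuildGeometryInfoKHR buildInfo{};
//   buildInfo.setType( vk::AccelerationStructureTypeKHR::eBottomLevel )
//            .setFlags( vk::BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace )
//            .setMode( vk::BuildAccelerationStructureModeKHR::eBuild )
//            .setGeometries( geometry );  // single element via ArrayProxyNoTemporaries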
// wrapper struct for struct VkAccelerationStructureBuildRangeInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureBuildRangeInfoKHR.html
struct AccelerationStructureBuildRangeInfoKHR
{
using NativeType = VkAccelerationStructureBuildRangeInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR AccelerationStructureBuildRangeInfoKHR(uint32_t primitiveCount_ = {}, uint32_t primitiveOffset_ = {}, uint32_t firstVertex_ = {}, uint32_t transformOffset_ = {}) VULKAN_HPP_NOEXCEPT
: primitiveCount{ primitiveCount_ }, primitiveOffset{ primitiveOffset_ }, firstVertex{ firstVertex_ }, transformOffset{ transformOffset_ }
{}
VULKAN_HPP_CONSTEXPR AccelerationStructureBuildRangeInfoKHR( AccelerationStructureBuildRangeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AccelerationStructureBuildRangeInfoKHR( VkAccelerationStructureBuildRangeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: AccelerationStructureBuildRangeInfoKHR( *reinterpret_cast<AccelerationStructureBuildRangeInfoKHR const *>( &rhs ) )
{}
AccelerationStructureBuildRangeInfoKHR & operator=( AccelerationStructureBuildRangeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
AccelerationStructureBuildRangeInfoKHR & operator=( VkAccelerationStructureBuildRangeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildRangeInfoKHR & setPrimitiveCount( uint32_t primitiveCount_ ) VULKAN_HPP_NOEXCEPT
{
primitiveCount = primitiveCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildRangeInfoKHR & setPrimitiveOffset( uint32_t primitiveOffset_ ) VULKAN_HPP_NOEXCEPT
{
primitiveOffset = primitiveOffset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildRangeInfoKHR & setFirstVertex( uint32_t firstVertex_ ) VULKAN_HPP_NOEXCEPT
{
firstVertex = firstVertex_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildRangeInfoKHR & setTransformOffset( uint32_t transformOffset_ ) VULKAN_HPP_NOEXCEPT
{
transformOffset = transformOffset_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkAccelerationStructureBuildRangeInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR*>( this );
}
operator VkAccelerationStructureBuildRangeInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAccelerationStructureBuildRangeInfoKHR*>( this );
}
operator VkAccelerationStructureBuildRangeInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR*>( this );
}
operator VkAccelerationStructureBuildRangeInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkAccelerationStructureBuildRangeInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( primitiveCount, primitiveOffset, firstVertex, transformOffset );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( AccelerationStructureBuildRangeInfoKHR const & ) const = default;
#else
bool operator==( AccelerationStructureBuildRangeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( primitiveCount == rhs.primitiveCount )
&& ( primitiveOffset == rhs.primitiveOffset )
&& ( firstVertex == rhs.firstVertex )
&& ( transformOffset == rhs.transformOffset );
#endif
}
bool operator!=( AccelerationStructureBuildRangeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
uint32_t primitiveCount = {};
uint32_t primitiveOffset = {};
uint32_t firstVertex = {};
uint32_t transformOffset = {};
};
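// Illustrative sketch, not part of the generated header: one range info is
// consumed per geometry; for indexed triangles, primitiveCount is the triangle
// count, primitiveOffset and transformOffset are byte offsets, and firstVertex
// is a vertex index. indexCount is an assumption:
//
//   vk::AccelerationStructureBuildRangeInfoKHR rangeInfo{};
//   rangeInfo.setPrimitiveCount( indexCount / 3 )
//            .setPrimitiveOffset( 0 )
//            .setFirstVertex( 0 )
//            .setTransformOffset( 0 );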
// wrapper struct for struct VkAccelerationStructureBuildSizesInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureBuildSizesInfoKHR.html
struct AccelerationStructureBuildSizesInfoKHR
{
using NativeType = VkAccelerationStructureBuildSizesInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureBuildSizesInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR AccelerationStructureBuildSizesInfoKHR(
  VULKAN_HPP_NAMESPACE::DeviceSize accelerationStructureSize_ = {},
  VULKAN_HPP_NAMESPACE::DeviceSize updateScratchSize_ = {},
  VULKAN_HPP_NAMESPACE::DeviceSize buildScratchSize_ = {},
  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
  : pNext{ pNext_ }
  , accelerationStructureSize{ accelerationStructureSize_ }
  , updateScratchSize{ updateScratchSize_ }
  , buildScratchSize{ buildScratchSize_ }
{}
VULKAN_HPP_CONSTEXPR AccelerationStructureBuildSizesInfoKHR( AccelerationStructureBuildSizesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AccelerationStructureBuildSizesInfoKHR( VkAccelerationStructureBuildSizesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: AccelerationStructureBuildSizesInfoKHR( *reinterpret_cast<AccelerationStructureBuildSizesInfoKHR const *>( &rhs ) )
{}
AccelerationStructureBuildSizesInfoKHR & operator=( AccelerationStructureBuildSizesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
AccelerationStructureBuildSizesInfoKHR & operator=( VkAccelerationStructureBuildSizesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR const *>( &rhs );
return *this;
}
operator VkAccelerationStructureBuildSizesInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAccelerationStructureBuildSizesInfoKHR*>( this );
}
operator VkAccelerationStructureBuildSizesInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAccelerationStructureBuildSizesInfoKHR*>( this );
}
operator VkAccelerationStructureBuildSizesInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkAccelerationStructureBuildSizesInfoKHR*>( this );
}
operator VkAccelerationStructureBuildSizesInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkAccelerationStructureBuildSizesInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, accelerationStructureSize, updateScratchSize, buildScratchSize );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( AccelerationStructureBuildSizesInfoKHR const & ) const = default;
#else
bool operator==( AccelerationStructureBuildSizesInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( accelerationStructureSize == rhs.accelerationStructureSize )
&& ( updateScratchSize == rhs.updateScratchSize )
&& ( buildScratchSize == rhs.buildScratchSize );
#endif
}
bool operator!=( AccelerationStructureBuildSizesInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureBuildSizesInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DeviceSize accelerationStructureSize = {};
VULKAN_HPP_NAMESPACE::DeviceSize updateScratchSize = {};
VULKAN_HPP_NAMESPACE::DeviceSize buildScratchSize = {};
};
template <>
struct CppType<StructureType, StructureType::eAccelerationStructureBuildSizesInfoKHR>
{
using Type = AccelerationStructureBuildSizesInfoKHR;
};
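// Illustrative sketch, not part of the generated header: this struct is the
// result of vk::Device::getAccelerationStructureBuildSizesKHR, which takes the
// build-geometry info and one maximum primitive count per geometry; device,
// buildInfo and indexCount are assumptions from the sketches above:
//
//   uint32_t maxPrimitiveCount = indexCount / 3;
//   vk::AccelerationStructureBuildSizesInfoKHR sizeInfo =
//     device.getAccelerationStructureBuildSizesKHR(
//       vk::AccelerationStructureBuildTypeKHR::eDevice, buildInfo, maxPrimitiveCount );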
// wrapper struct for struct VkAccelerationStructureCaptureDescriptorDataInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureCaptureDescriptorDataInfoEXT.html
struct AccelerationStructureCaptureDescriptorDataInfoEXT
{
using NativeType = VkAccelerationStructureCaptureDescriptorDataInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureCaptureDescriptorDataInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR AccelerationStructureCaptureDescriptorDataInfoEXT(
  VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ = {},
  VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructureNV_ = {},
  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
  : pNext{ pNext_ }, accelerationStructure{ accelerationStructure_ }, accelerationStructureNV{ accelerationStructureNV_ }
{}
VULKAN_HPP_CONSTEXPR AccelerationStructureCaptureDescriptorDataInfoEXT( AccelerationStructureCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AccelerationStructureCaptureDescriptorDataInfoEXT( VkAccelerationStructureCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: AccelerationStructureCaptureDescriptorDataInfoEXT( *reinterpret_cast<AccelerationStructureCaptureDescriptorDataInfoEXT const *>( &rhs ) )
{}
AccelerationStructureCaptureDescriptorDataInfoEXT & operator=( AccelerationStructureCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
AccelerationStructureCaptureDescriptorDataInfoEXT & operator=( VkAccelerationStructureCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCaptureDescriptorDataInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCaptureDescriptorDataInfoEXT & setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
{
accelerationStructure = accelerationStructure_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCaptureDescriptorDataInfoEXT & setAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructureNV_ ) VULKAN_HPP_NOEXCEPT
{
accelerationStructureNV = accelerationStructureNV_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkAccelerationStructureCaptureDescriptorDataInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAccelerationStructureCaptureDescriptorDataInfoEXT*>( this );
}
operator VkAccelerationStructureCaptureDescriptorDataInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAccelerationStructureCaptureDescriptorDataInfoEXT*>( this );
}
operator VkAccelerationStructureCaptureDescriptorDataInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkAccelerationStructureCaptureDescriptorDataInfoEXT*>( this );
}
operator VkAccelerationStructureCaptureDescriptorDataInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkAccelerationStructureCaptureDescriptorDataInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR const &, VULKAN_HPP_NAMESPACE::AccelerationStructureNV const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, accelerationStructure, accelerationStructureNV );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( AccelerationStructureCaptureDescriptorDataInfoEXT const & ) const = default;
#else
bool operator==( AccelerationStructureCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( accelerationStructure == rhs.accelerationStructure )
&& ( accelerationStructureNV == rhs.accelerationStructureNV );
#endif
}
bool operator!=( AccelerationStructureCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureCaptureDescriptorDataInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure = {};
VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructureNV = {};
};
template <>
struct CppType<StructureType, StructureType::eAccelerationStructureCaptureDescriptorDataInfoEXT>
{
using Type = AccelerationStructureCaptureDescriptorDataInfoEXT;
};
// wrapper struct for struct VkAccelerationStructureCreateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureCreateInfoKHR.html
struct AccelerationStructureCreateInfoKHR
{
using NativeType = VkAccelerationStructureCreateInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureCreateInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoKHR(
  VULKAN_HPP_NAMESPACE::AccelerationStructureCreateFlagsKHR createFlags_ = {},
  VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
  VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {},
  VULKAN_HPP_NAMESPACE::DeviceSize size_ = {},
  VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel,
  VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {},
  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
  : pNext{ pNext_ }
  , createFlags{ createFlags_ }
  , buffer{ buffer_ }
  , offset{ offset_ }
  , size{ size_ }
  , type{ type_ }
  , deviceAddress{ deviceAddress_ }
{}
VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoKHR( AccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AccelerationStructureCreateInfoKHR( VkAccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: AccelerationStructureCreateInfoKHR( *reinterpret_cast<AccelerationStructureCreateInfoKHR const *>( &rhs ) )
{}
AccelerationStructureCreateInfoKHR & operator=( AccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
AccelerationStructureCreateInfoKHR & operator=( VkAccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & setCreateFlags( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateFlagsKHR createFlags_ ) VULKAN_HPP_NOEXCEPT
{
createFlags = createFlags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
{
buffer = buffer_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
{
offset = offset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
{
size = size_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & setType( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ ) VULKAN_HPP_NOEXCEPT
{
type = type_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
{
deviceAddress = deviceAddress_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkAccelerationStructureCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAccelerationStructureCreateInfoKHR*>( this );
}
operator VkAccelerationStructureCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAccelerationStructureCreateInfoKHR*>( this );
}
operator VkAccelerationStructureCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkAccelerationStructureCreateInfoKHR*>( this );
}
operator VkAccelerationStructureCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkAccelerationStructureCreateInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::AccelerationStructureCreateFlagsKHR const &, VULKAN_HPP_NAMESPACE::Buffer const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, createFlags, buffer, offset, size, type, deviceAddress );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( AccelerationStructureCreateInfoKHR const & ) const = default;
#else
bool operator==( AccelerationStructureCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( createFlags == rhs.createFlags )
&& ( buffer == rhs.buffer )
&& ( offset == rhs.offset )
&& ( size == rhs.size )
&& ( type == rhs.type )
&& ( deviceAddress == rhs.deviceAddress );
#endif
}
bool operator!=( AccelerationStructureCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureCreateInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::AccelerationStructureCreateFlagsKHR createFlags = {};
VULKAN_HPP_NAMESPACE::Buffer buffer = {};
VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
VULKAN_HPP_NAMESPACE::DeviceSize size = {};
VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel;
VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {};
};
template <>
struct CppType<StructureType, StructureType::eAccelerationStructureCreateInfoKHR>
{
using Type = AccelerationStructureCreateInfoKHR;
};
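// Illustrative sketch, not part of the generated header: the acceleration
// structure is placed inside an existing buffer; asBuffer and the sizeInfo
// sketched above are assumptions (offset must be a multiple of 256):
//
//   vk::AccelerationStructureCreateInfoKHR createInfo{};
//   createInfo.setBuffer( asBuffer )
//             .setOffset( 0 )
//             .setSize( sizeInfo.accelerationStructureSize )
//             .setType( vk::AccelerationStructureTypeKHR::eBottomLevel );
//   vk::AccelerationStructureKHR blas = device.createAccelerationStructureKHR( createInfo );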
// wrapper struct for struct VkGeometryTrianglesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkGeometryTrianglesNV.html
struct GeometryTrianglesNV
{
using NativeType = VkGeometryTrianglesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeometryTrianglesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR GeometryTrianglesNV(
  VULKAN_HPP_NAMESPACE::Buffer vertexData_ = {},
  VULKAN_HPP_NAMESPACE::DeviceSize vertexOffset_ = {},
  uint32_t vertexCount_ = {},
  VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ = {},
  VULKAN_HPP_NAMESPACE::Format vertexFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
  VULKAN_HPP_NAMESPACE::Buffer indexData_ = {},
  VULKAN_HPP_NAMESPACE::DeviceSize indexOffset_ = {},
  uint32_t indexCount_ = {},
  VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16,
  VULKAN_HPP_NAMESPACE::Buffer transformData_ = {},
  VULKAN_HPP_NAMESPACE::DeviceSize transformOffset_ = {},
  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
  : pNext{ pNext_ }
  , vertexData{ vertexData_ }
  , vertexOffset{ vertexOffset_ }
  , vertexCount{ vertexCount_ }
  , vertexStride{ vertexStride_ }
  , vertexFormat{ vertexFormat_ }
  , indexData{ indexData_ }
  , indexOffset{ indexOffset_ }
  , indexCount{ indexCount_ }
  , indexType{ indexType_ }
  , transformData{ transformData_ }
  , transformOffset{ transformOffset_ }
{}
VULKAN_HPP_CONSTEXPR GeometryTrianglesNV( GeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
GeometryTrianglesNV( VkGeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: GeometryTrianglesNV( *reinterpret_cast<GeometryTrianglesNV const *>( &rhs ) )
{}
GeometryTrianglesNV & operator=( GeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
GeometryTrianglesNV & operator=( VkGeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeometryTrianglesNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setVertexData( VULKAN_HPP_NAMESPACE::Buffer vertexData_ ) VULKAN_HPP_NOEXCEPT
{
vertexData = vertexData_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setVertexOffset( VULKAN_HPP_NAMESPACE::DeviceSize vertexOffset_ ) VULKAN_HPP_NOEXCEPT
{
vertexOffset = vertexOffset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setVertexCount( uint32_t vertexCount_ ) VULKAN_HPP_NOEXCEPT
{
vertexCount = vertexCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setVertexStride( VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ ) VULKAN_HPP_NOEXCEPT
{
vertexStride = vertexStride_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setVertexFormat( VULKAN_HPP_NAMESPACE::Format vertexFormat_ ) VULKAN_HPP_NOEXCEPT
{
vertexFormat = vertexFormat_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setIndexData( VULKAN_HPP_NAMESPACE::Buffer indexData_ ) VULKAN_HPP_NOEXCEPT
{
indexData = indexData_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setIndexOffset( VULKAN_HPP_NAMESPACE::DeviceSize indexOffset_ ) VULKAN_HPP_NOEXCEPT
{
indexOffset = indexOffset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setIndexCount( uint32_t indexCount_ ) VULKAN_HPP_NOEXCEPT
{
indexCount = indexCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT
{
indexType = indexType_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setTransformData( VULKAN_HPP_NAMESPACE::Buffer transformData_ ) VULKAN_HPP_NOEXCEPT
{
transformData = transformData_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setTransformOffset( VULKAN_HPP_NAMESPACE::DeviceSize transformOffset_ ) VULKAN_HPP_NOEXCEPT
{
transformOffset = transformOffset_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkGeometryTrianglesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkGeometryTrianglesNV*>( this );
}
operator VkGeometryTrianglesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkGeometryTrianglesNV*>( this );
}
operator VkGeometryTrianglesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkGeometryTrianglesNV*>( this );
}
operator VkGeometryTrianglesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkGeometryTrianglesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Buffer const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::Buffer const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, uint32_t const &, VULKAN_HPP_NAMESPACE::IndexType const &, VULKAN_HPP_NAMESPACE::Buffer const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, vertexData, vertexOffset, vertexCount, vertexStride, vertexFormat, indexData, indexOffset, indexCount, indexType, transformData, transformOffset );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( GeometryTrianglesNV const & ) const = default;
#else
bool operator==( GeometryTrianglesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( vertexData == rhs.vertexData )
&& ( vertexOffset == rhs.vertexOffset )
&& ( vertexCount == rhs.vertexCount )
&& ( vertexStride == rhs.vertexStride )
&& ( vertexFormat == rhs.vertexFormat )
&& ( indexData == rhs.indexData )
&& ( indexOffset == rhs.indexOffset )
&& ( indexCount == rhs.indexCount )
&& ( indexType == rhs.indexType )
&& ( transformData == rhs.transformData )
&& ( transformOffset == rhs.transformOffset );
#endif
}
bool operator!=( GeometryTrianglesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeometryTrianglesNV;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Buffer vertexData = {};
VULKAN_HPP_NAMESPACE::DeviceSize vertexOffset = {};
uint32_t vertexCount = {};
VULKAN_HPP_NAMESPACE::DeviceSize vertexStride = {};
VULKAN_HPP_NAMESPACE::Format vertexFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
VULKAN_HPP_NAMESPACE::Buffer indexData = {};
VULKAN_HPP_NAMESPACE::DeviceSize indexOffset = {};
uint32_t indexCount = {};
VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16;
VULKAN_HPP_NAMESPACE::Buffer transformData = {};
VULKAN_HPP_NAMESPACE::DeviceSize transformOffset = {};
};
template <>
struct CppType<StructureType, StructureType::eGeometryTrianglesNV>
{
using Type = GeometryTrianglesNV;
};
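// usage sketch (illustrative only): describing an indexed triangle mesh for the NV ray
// tracing extension; `vertexBuffer`, `indexBuffer` and the counts are hypothetical and must
// match the actual mesh layout.
//
//   vk::GeometryTrianglesNV triangles = vk::GeometryTrianglesNV{}
//     .setVertexData( vertexBuffer )
//     .setVertexCount( vertexCount )
//     .setVertexStride( 3 * sizeof( float ) )
//     .setVertexFormat( vk::Format::eR32G32B32Sfloat )
//     .setIndexData( indexBuffer )
//     .setIndexCount( indexCount )
//     .setIndexType( vk::IndexType::eUint32 );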
// wrapper struct for struct VkGeometryAABBNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkGeometryAABBNV.html
struct GeometryAABBNV
{
using NativeType = VkGeometryAABBNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeometryAabbNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR GeometryAABBNV(VULKAN_HPP_NAMESPACE::Buffer aabbData_ = {}, uint32_t numAABBs_ = {}, uint32_t stride_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, aabbData{ aabbData_ }, numAABBs{ numAABBs_ }, stride{ stride_ }, offset{ offset_ }
{}
VULKAN_HPP_CONSTEXPR GeometryAABBNV( GeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
GeometryAABBNV( VkGeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT
: GeometryAABBNV( *reinterpret_cast<GeometryAABBNV const *>( &rhs ) )
{}
GeometryAABBNV & operator=( GeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
GeometryAABBNV & operator=( VkGeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeometryAABBNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 GeometryAABBNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GeometryAABBNV & setAabbData( VULKAN_HPP_NAMESPACE::Buffer aabbData_ ) VULKAN_HPP_NOEXCEPT
{
aabbData = aabbData_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GeometryAABBNV & setNumAABBs( uint32_t numAABBs_ ) VULKAN_HPP_NOEXCEPT
{
numAABBs = numAABBs_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GeometryAABBNV & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT
{
stride = stride_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GeometryAABBNV & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
{
offset = offset_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkGeometryAABBNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkGeometryAABBNV*>( this );
}
operator VkGeometryAABBNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkGeometryAABBNV*>( this );
}
operator VkGeometryAABBNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkGeometryAABBNV*>( this );
}
operator VkGeometryAABBNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkGeometryAABBNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Buffer const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, aabbData, numAABBs, stride, offset );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( GeometryAABBNV const & ) const = default;
#else
bool operator==( GeometryAABBNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( aabbData == rhs.aabbData )
&& ( numAABBs == rhs.numAABBs )
&& ( stride == rhs.stride )
&& ( offset == rhs.offset );
#endif
}
bool operator!=( GeometryAABBNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeometryAabbNV;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Buffer aabbData = {};
uint32_t numAABBs = {};
uint32_t stride = {};
VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
};
template <>
struct CppType<StructureType, StructureType::eGeometryAabbNV>
{
using Type = GeometryAABBNV;
};
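// usage sketch (illustrative only): `aabbBuffer` is a hypothetical vk::Buffer holding
// tightly packed min/max AABB entries of six floats each, hence the 24-byte stride.
//
//   vk::GeometryAABBNV aabbs = vk::GeometryAABBNV{}
//     .setAabbData( aabbBuffer )
//     .setNumAABBs( aabbCount )
//     .setStride( 6 * sizeof( float ) )
//     .setOffset( 0 );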
// wrapper struct for struct VkGeometryDataNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkGeometryDataNV.html
struct GeometryDataNV
{
using NativeType = VkGeometryDataNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR GeometryDataNV(VULKAN_HPP_NAMESPACE::GeometryTrianglesNV triangles_ = {}, VULKAN_HPP_NAMESPACE::GeometryAABBNV aabbs_ = {}) VULKAN_HPP_NOEXCEPT
: triangles{ triangles_ }, aabbs{ aabbs_ }
{}
VULKAN_HPP_CONSTEXPR GeometryDataNV( GeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
GeometryDataNV( VkGeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
: GeometryDataNV( *reinterpret_cast<GeometryDataNV const *>( &rhs ) )
{}
GeometryDataNV & operator=( GeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
GeometryDataNV & operator=( VkGeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeometryDataNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 GeometryDataNV & setTriangles( VULKAN_HPP_NAMESPACE::GeometryTrianglesNV const & triangles_ ) VULKAN_HPP_NOEXCEPT
{
triangles = triangles_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GeometryDataNV & setAabbs( VULKAN_HPP_NAMESPACE::GeometryAABBNV const & aabbs_ ) VULKAN_HPP_NOEXCEPT
{
aabbs = aabbs_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkGeometryDataNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkGeometryDataNV*>( this );
}
operator VkGeometryDataNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkGeometryDataNV*>( this );
}
operator VkGeometryDataNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkGeometryDataNV*>( this );
}
operator VkGeometryDataNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkGeometryDataNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::GeometryTrianglesNV const &, VULKAN_HPP_NAMESPACE::GeometryAABBNV const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( triangles, aabbs );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( GeometryDataNV const & ) const = default;
#else
bool operator==( GeometryDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( triangles == rhs.triangles )
&& ( aabbs == rhs.aabbs );
#endif
}
bool operator!=( GeometryDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::GeometryTrianglesNV triangles = {};
VULKAN_HPP_NAMESPACE::GeometryAABBNV aabbs = {};
};
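// note: GeometryDataNV carries both a triangles and an aabbs description, but only the member
// selected by the enclosing GeometryNV::geometryType is read by the implementation.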
// wrapper struct for struct VkGeometryNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkGeometryNV.html
struct GeometryNV
{
using NativeType = VkGeometryNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeometryNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR GeometryNV(VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles, VULKAN_HPP_NAMESPACE::GeometryDataNV geometry_ = {}, VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, geometryType{ geometryType_ }, geometry{ geometry_ }, flags{ flags_ }
{}
VULKAN_HPP_CONSTEXPR GeometryNV( GeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
GeometryNV( VkGeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT
: GeometryNV( *reinterpret_cast<GeometryNV const *>( &rhs ) )
{}
GeometryNV & operator=( GeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
GeometryNV & operator=( VkGeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeometryNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 GeometryNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GeometryNV & setGeometryType( VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ ) VULKAN_HPP_NOEXCEPT
{
geometryType = geometryType_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GeometryNV & setGeometry( VULKAN_HPP_NAMESPACE::GeometryDataNV const & geometry_ ) VULKAN_HPP_NOEXCEPT
{
geometry = geometry_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GeometryNV & setFlags( VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkGeometryNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkGeometryNV*>( this );
}
operator VkGeometryNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkGeometryNV*>( this );
}
operator VkGeometryNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkGeometryNV*>( this );
}
operator VkGeometryNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkGeometryNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::GeometryTypeKHR const &, VULKAN_HPP_NAMESPACE::GeometryDataNV const &, VULKAN_HPP_NAMESPACE::GeometryFlagsKHR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, geometryType, geometry, flags );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( GeometryNV const & ) const = default;
#else
bool operator==( GeometryNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( geometryType == rhs.geometryType )
&& ( geometry == rhs.geometry )
&& ( flags == rhs.flags );
#endif
}
bool operator!=( GeometryNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeometryNV;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles;
VULKAN_HPP_NAMESPACE::GeometryDataNV geometry = {};
VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags = {};
};
template <>
struct CppType<StructureType, StructureType::eGeometryNV>
{
using Type = GeometryNV;
};
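// usage sketch (illustrative only), reusing `triangles` from the GeometryTrianglesNV sketch
// above; per the GeometryDataNV note, only the member matching geometryType needs to be set.
//
//   vk::GeometryNV geometry = vk::GeometryNV{}
//     .setGeometryType( vk::GeometryTypeKHR::eTriangles )
//     .setGeometry( vk::GeometryDataNV{}.setTriangles( triangles ) )
//     .setFlags( vk::GeometryFlagBitsKHR::eOpaque );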
// wrapper struct for struct VkAccelerationStructureInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureInfoNV.html
struct AccelerationStructureInfoNV
{
using NativeType = VkAccelerationStructureInfoNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureInfoNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR AccelerationStructureInfoNV(VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type_ = {}, VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_ = {}, uint32_t instanceCount_ = {}, uint32_t geometryCount_ = {}, const VULKAN_HPP_NAMESPACE::GeometryNV * pGeometries_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, type{ type_ }, flags{ flags_ }, instanceCount{ instanceCount_ }, geometryCount{ geometryCount_ }, pGeometries{ pGeometries_ }
{}
VULKAN_HPP_CONSTEXPR AccelerationStructureInfoNV( AccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AccelerationStructureInfoNV( VkAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: AccelerationStructureInfoNV( *reinterpret_cast<AccelerationStructureInfoNV const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
AccelerationStructureInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type_, VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_, uint32_t instanceCount_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::GeometryNV> const & geometries_, const void * pNext_ = nullptr )
: pNext( pNext_ ), type( type_ ), flags( flags_ ), instanceCount( instanceCount_ ), geometryCount( static_cast<uint32_t>( geometries_.size() ) ), pGeometries( geometries_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
AccelerationStructureInfoNV & operator=( AccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
AccelerationStructureInfoNV & operator=( VkAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV & setType( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type_ ) VULKAN_HPP_NOEXCEPT
{
type = type_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV & setFlags( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT
{
instanceCount = instanceCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV & setGeometryCount( uint32_t geometryCount_ ) VULKAN_HPP_NOEXCEPT
{
geometryCount = geometryCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV & setPGeometries( const VULKAN_HPP_NAMESPACE::GeometryNV * pGeometries_ ) VULKAN_HPP_NOEXCEPT
{
pGeometries = pGeometries_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
AccelerationStructureInfoNV & setGeometries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::GeometryNV> const & geometries_ ) VULKAN_HPP_NOEXCEPT
{
geometryCount = static_cast<uint32_t>( geometries_.size() );
pGeometries = geometries_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkAccelerationStructureInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAccelerationStructureInfoNV*>( this );
}
operator VkAccelerationStructureInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAccelerationStructureInfoNV*>( this );
}
operator VkAccelerationStructureInfoNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkAccelerationStructureInfoNV*>( this );
}
operator VkAccelerationStructureInfoNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkAccelerationStructureInfoNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV const &, VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV const &, uint32_t const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::GeometryNV * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, type, flags, instanceCount, geometryCount, pGeometries );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( AccelerationStructureInfoNV const & ) const = default;
#else
bool operator==( AccelerationStructureInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( type == rhs.type )
&& ( flags == rhs.flags )
&& ( instanceCount == rhs.instanceCount )
&& ( geometryCount == rhs.geometryCount )
&& ( pGeometries == rhs.pGeometries );
#endif
}
bool operator!=( AccelerationStructureInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureInfoNV;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type = {};
VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags = {};
uint32_t instanceCount = {};
uint32_t geometryCount = {};
const VULKAN_HPP_NAMESPACE::GeometryNV * pGeometries = {};
};
template <>
struct CppType<StructureType, StructureType::eAccelerationStructureInfoNV>
{
using Type = AccelerationStructureInfoNV;
};
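// usage sketch (illustrative only): for a bottom-level structure, instanceCount must be 0;
// the enhanced-mode setGeometries overload sets geometryCount and pGeometries together, so
// the two cannot drift apart.
//
//   std::array<vk::GeometryNV, 1> geometries = { geometry };   // from the GeometryNV sketch
//   vk::AccelerationStructureInfoNV info = vk::AccelerationStructureInfoNV{}
//     .setType( vk::AccelerationStructureTypeNV::eBottomLevel )
//     .setFlags( vk::BuildAccelerationStructureFlagBitsNV::ePreferFastTrace )
//     .setGeometries( geometries );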
// wrapper struct for struct VkAccelerationStructureCreateInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureCreateInfoNV.html
struct AccelerationStructureCreateInfoNV
{
using NativeType = VkAccelerationStructureCreateInfoNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureCreateInfoNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoNV(VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV info_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, compactedSize{ compactedSize_ }, info{ info_ }
{}
VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoNV( AccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AccelerationStructureCreateInfoNV( VkAccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: AccelerationStructureCreateInfoNV( *reinterpret_cast<AccelerationStructureCreateInfoNV const *>( &rhs ) )
{}
AccelerationStructureCreateInfoNV & operator=( AccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
AccelerationStructureCreateInfoNV & operator=( VkAccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoNV & setCompactedSize( VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_ ) VULKAN_HPP_NOEXCEPT
{
compactedSize = compactedSize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoNV & setInfo( VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV const & info_ ) VULKAN_HPP_NOEXCEPT
{
info = info_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkAccelerationStructureCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAccelerationStructureCreateInfoNV*>( this );
}
operator VkAccelerationStructureCreateInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAccelerationStructureCreateInfoNV*>( this );
}
operator VkAccelerationStructureCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkAccelerationStructureCreateInfoNV*>( this );
}
operator VkAccelerationStructureCreateInfoNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkAccelerationStructureCreateInfoNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, compactedSize, info );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( AccelerationStructureCreateInfoNV const & ) const = default;
#else
bool operator==( AccelerationStructureCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( compactedSize == rhs.compactedSize )
&& ( info == rhs.info );
#endif
}
bool operator!=( AccelerationStructureCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureCreateInfoNV;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DeviceSize compactedSize = {};
VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV info = {};
};
template <>
struct CppType<StructureType, StructureType::eAccelerationStructureCreateInfoNV>
{
using Type = AccelerationStructureCreateInfoNV;
};
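// usage sketch (illustrative only): compactedSize is normally left at 0; it is only non-zero
// when the structure is created as the target of a compacting copy, in which case it gives
// the compacted size previously queried from the source structure.
//
//   vk::AccelerationStructureCreateInfoNV createInfoNV = vk::AccelerationStructureCreateInfoNV{}
//     .setCompactedSize( 0 )
//     .setInfo( info );   // from the AccelerationStructureInfoNV sketch above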
// wrapper struct for struct VkAccelerationStructureDeviceAddressInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureDeviceAddressInfoKHR.html
struct AccelerationStructureDeviceAddressInfoKHR
{
using NativeType = VkAccelerationStructureDeviceAddressInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureDeviceAddressInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR AccelerationStructureDeviceAddressInfoKHR(VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, accelerationStructure{ accelerationStructure_ }
{}
VULKAN_HPP_CONSTEXPR AccelerationStructureDeviceAddressInfoKHR( AccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AccelerationStructureDeviceAddressInfoKHR( VkAccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: AccelerationStructureDeviceAddressInfoKHR( *reinterpret_cast<AccelerationStructureDeviceAddressInfoKHR const *>( &rhs ) )
{}
AccelerationStructureDeviceAddressInfoKHR & operator=( AccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
AccelerationStructureDeviceAddressInfoKHR & operator=( VkAccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureDeviceAddressInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureDeviceAddressInfoKHR & setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
{
accelerationStructure = accelerationStructure_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkAccelerationStructureDeviceAddressInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR*>( this );
}
operator VkAccelerationStructureDeviceAddressInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAccelerationStructureDeviceAddressInfoKHR*>( this );
}
operator VkAccelerationStructureDeviceAddressInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR*>( this );
}
operator VkAccelerationStructureDeviceAddressInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkAccelerationStructureDeviceAddressInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, accelerationStructure );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( AccelerationStructureDeviceAddressInfoKHR const & ) const = default;
#else
bool operator==( AccelerationStructureDeviceAddressInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( accelerationStructure == rhs.accelerationStructure );
#endif
}
bool operator!=( AccelerationStructureDeviceAddressInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureDeviceAddressInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure = {};
};
template <>
struct CppType<StructureType, StructureType::eAccelerationStructureDeviceAddressInfoKHR>
{
using Type = AccelerationStructureDeviceAddressInfoKHR;
};
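// usage sketch (illustrative only): querying the device address that is later written into
// AccelerationStructureInstanceKHR::accelerationStructureReference; `device` and `blas` are
// hypothetical handles.
//
//   vk::DeviceAddress blasAddress = device.getAccelerationStructureAddressKHR(
//     vk::AccelerationStructureDeviceAddressInfoKHR{}.setAccelerationStructure( blas ) );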
// wrapper struct for struct VkAccelerationStructureGeometryLinearSweptSpheresDataNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureGeometryLinearSweptSpheresDataNV.html
struct AccelerationStructureGeometryLinearSweptSpheresDataNV
{
using NativeType = VkAccelerationStructureGeometryLinearSweptSpheresDataNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryLinearSweptSpheresDataNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV(VULKAN_HPP_NAMESPACE::Format vertexFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ = {}, VULKAN_HPP_NAMESPACE::Format radiusFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR radiusData_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize radiusStride_ = {}, VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexData_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize indexStride_ = {}, VULKAN_HPP_NAMESPACE::RayTracingLssIndexingModeNV indexingMode_ = VULKAN_HPP_NAMESPACE::RayTracingLssIndexingModeNV::eList, VULKAN_HPP_NAMESPACE::RayTracingLssPrimitiveEndCapsModeNV endCapsMode_ = VULKAN_HPP_NAMESPACE::RayTracingLssPrimitiveEndCapsModeNV::eNone, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, vertexFormat{ vertexFormat_ }, vertexData{ vertexData_ }, vertexStride{ vertexStride_ }, radiusFormat{ radiusFormat_ }, radiusData{ radiusData_ }, radiusStride{ radiusStride_ }, indexType{ indexType_ }, indexData{ indexData_ }, indexStride{ indexStride_ }, indexingMode{ indexingMode_ }, endCapsMode{ endCapsMode_ }
{}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV( AccelerationStructureGeometryLinearSweptSpheresDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AccelerationStructureGeometryLinearSweptSpheresDataNV( VkAccelerationStructureGeometryLinearSweptSpheresDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
: AccelerationStructureGeometryLinearSweptSpheresDataNV( *reinterpret_cast<AccelerationStructureGeometryLinearSweptSpheresDataNV const *>( &rhs ) )
{}
AccelerationStructureGeometryLinearSweptSpheresDataNV & operator=( AccelerationStructureGeometryLinearSweptSpheresDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
AccelerationStructureGeometryLinearSweptSpheresDataNV & operator=( VkAccelerationStructureGeometryLinearSweptSpheresDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryLinearSweptSpheresDataNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV & setVertexFormat( VULKAN_HPP_NAMESPACE::Format vertexFormat_ ) VULKAN_HPP_NOEXCEPT
{
vertexFormat = vertexFormat_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV & setVertexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & vertexData_ ) VULKAN_HPP_NOEXCEPT
{
vertexData = vertexData_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV & setVertexStride( VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ ) VULKAN_HPP_NOEXCEPT
{
vertexStride = vertexStride_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV & setRadiusFormat( VULKAN_HPP_NAMESPACE::Format radiusFormat_ ) VULKAN_HPP_NOEXCEPT
{
radiusFormat = radiusFormat_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV & setRadiusData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & radiusData_ ) VULKAN_HPP_NOEXCEPT
{
radiusData = radiusData_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV & setRadiusStride( VULKAN_HPP_NAMESPACE::DeviceSize radiusStride_ ) VULKAN_HPP_NOEXCEPT
{
radiusStride = radiusStride_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT
{
indexType = indexType_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV & setIndexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & indexData_ ) VULKAN_HPP_NOEXCEPT
{
indexData = indexData_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV & setIndexStride( VULKAN_HPP_NAMESPACE::DeviceSize indexStride_ ) VULKAN_HPP_NOEXCEPT
{
indexStride = indexStride_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV & setIndexingMode( VULKAN_HPP_NAMESPACE::RayTracingLssIndexingModeNV indexingMode_ ) VULKAN_HPP_NOEXCEPT
{
indexingMode = indexingMode_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV & setEndCapsMode( VULKAN_HPP_NAMESPACE::RayTracingLssPrimitiveEndCapsModeNV endCapsMode_ ) VULKAN_HPP_NOEXCEPT
{
endCapsMode = endCapsMode_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkAccelerationStructureGeometryLinearSweptSpheresDataNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAccelerationStructureGeometryLinearSweptSpheresDataNV*>( this );
}
operator VkAccelerationStructureGeometryLinearSweptSpheresDataNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAccelerationStructureGeometryLinearSweptSpheresDataNV*>( this );
}
operator VkAccelerationStructureGeometryLinearSweptSpheresDataNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkAccelerationStructureGeometryLinearSweptSpheresDataNV*>( this );
}
operator VkAccelerationStructureGeometryLinearSweptSpheresDataNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkAccelerationStructureGeometryLinearSweptSpheresDataNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::IndexType const &, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::RayTracingLssIndexingModeNV const &, VULKAN_HPP_NAMESPACE::RayTracingLssPrimitiveEndCapsModeNV const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, vertexFormat, vertexData, vertexStride, radiusFormat, radiusData, radiusStride, indexType, indexData, indexStride, indexingMode, endCapsMode );
}
#endif
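// note: unlike most structs in this header, no operator<=>/operator== is generated here,
// since the DeviceOrHostAddressConstKHR members are unions and cannot be compared member-wise.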
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryLinearSweptSpheresDataNV;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Format vertexFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData = {};
VULKAN_HPP_NAMESPACE::DeviceSize vertexStride = {};
VULKAN_HPP_NAMESPACE::Format radiusFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR radiusData = {};
VULKAN_HPP_NAMESPACE::DeviceSize radiusStride = {};
VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16;
VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexData = {};
VULKAN_HPP_NAMESPACE::DeviceSize indexStride = {};
VULKAN_HPP_NAMESPACE::RayTracingLssIndexingModeNV indexingMode = VULKAN_HPP_NAMESPACE::RayTracingLssIndexingModeNV::eList;
VULKAN_HPP_NAMESPACE::RayTracingLssPrimitiveEndCapsModeNV endCapsMode = VULKAN_HPP_NAMESPACE::RayTracingLssPrimitiveEndCapsModeNV::eNone;
};
template <>
struct CppType<StructureType, StructureType::eAccelerationStructureGeometryLinearSweptSpheresDataNV>
{
using Type = AccelerationStructureGeometryLinearSweptSpheresDataNV;
};
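// usage sketch (illustrative only): `vertexAddress` and `radiusAddress` are hypothetical
// device addresses of tightly packed positions and per-vertex radii.
//
//   vk::AccelerationStructureGeometryLinearSweptSpheresDataNV lss =
//     vk::AccelerationStructureGeometryLinearSweptSpheresDataNV{}
//       .setVertexFormat( vk::Format::eR32G32B32Sfloat )
//       .setVertexData( vk::DeviceOrHostAddressConstKHR{}.setDeviceAddress( vertexAddress ) )
//       .setVertexStride( 3 * sizeof( float ) )
//       .setRadiusFormat( vk::Format::eR32Sfloat )
//       .setRadiusData( vk::DeviceOrHostAddressConstKHR{}.setDeviceAddress( radiusAddress ) )
//       .setRadiusStride( sizeof( float ) );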
// wrapper struct for struct VkAccelerationStructureGeometryMotionTrianglesDataNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureGeometryMotionTrianglesDataNV.html
struct AccelerationStructureGeometryMotionTrianglesDataNV
{
using NativeType = VkAccelerationStructureGeometryMotionTrianglesDataNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryMotionTrianglesDataNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryMotionTrianglesDataNV(VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, vertexData{ vertexData_ }
{}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryMotionTrianglesDataNV( AccelerationStructureGeometryMotionTrianglesDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AccelerationStructureGeometryMotionTrianglesDataNV( VkAccelerationStructureGeometryMotionTrianglesDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
: AccelerationStructureGeometryMotionTrianglesDataNV( *reinterpret_cast<AccelerationStructureGeometryMotionTrianglesDataNV const *>( &rhs ) )
{}
AccelerationStructureGeometryMotionTrianglesDataNV & operator=( AccelerationStructureGeometryMotionTrianglesDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
AccelerationStructureGeometryMotionTrianglesDataNV & operator=( VkAccelerationStructureGeometryMotionTrianglesDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryMotionTrianglesDataNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryMotionTrianglesDataNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryMotionTrianglesDataNV & setVertexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & vertexData_ ) VULKAN_HPP_NOEXCEPT
{
vertexData = vertexData_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkAccelerationStructureGeometryMotionTrianglesDataNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAccelerationStructureGeometryMotionTrianglesDataNV*>( this );
}
operator VkAccelerationStructureGeometryMotionTrianglesDataNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAccelerationStructureGeometryMotionTrianglesDataNV*>( this );
}
operator VkAccelerationStructureGeometryMotionTrianglesDataNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkAccelerationStructureGeometryMotionTrianglesDataNV*>( this );
}
operator VkAccelerationStructureGeometryMotionTrianglesDataNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkAccelerationStructureGeometryMotionTrianglesDataNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, vertexData );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryMotionTrianglesDataNV;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData = {};
};
template <>
struct CppType<StructureType, StructureType::eAccelerationStructureGeometryMotionTrianglesDataNV>
{
using Type = AccelerationStructureGeometryMotionTrianglesDataNV;
};
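// usage sketch (illustrative only): this struct is chained through the pNext of an
// AccelerationStructureGeometryTrianglesDataKHR to supply the time-1 vertex positions of a
// motion-blurred triangle geometry; the base struct's vertexData provides the time-0 positions.
//
//   vk::AccelerationStructureGeometryMotionTrianglesDataNV motion =
//     vk::AccelerationStructureGeometryMotionTrianglesDataNV{}
//       .setVertexData( vk::DeviceOrHostAddressConstKHR{}.setDeviceAddress( t1VertexAddress ) );
//   trianglesKHR.setPNext( &motion );   // trianglesKHR: a hypothetical
//                                       // AccelerationStructureGeometryTrianglesDataKHR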
// wrapper struct for struct VkAccelerationStructureGeometrySpheresDataNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureGeometrySpheresDataNV.html
struct AccelerationStructureGeometrySpheresDataNV
{
using NativeType = VkAccelerationStructureGeometrySpheresDataNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometrySpheresDataNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV(VULKAN_HPP_NAMESPACE::Format vertexFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ = {}, VULKAN_HPP_NAMESPACE::Format radiusFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR radiusData_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize radiusStride_ = {}, VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexData_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize indexStride_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, vertexFormat{ vertexFormat_ }, vertexData{ vertexData_ }, vertexStride{ vertexStride_ }, radiusFormat{ radiusFormat_ }, radiusData{ radiusData_ }, radiusStride{ radiusStride_ }, indexType{ indexType_ }, indexData{ indexData_ }, indexStride{ indexStride_ }
{}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV( AccelerationStructureGeometrySpheresDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AccelerationStructureGeometrySpheresDataNV( VkAccelerationStructureGeometrySpheresDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
: AccelerationStructureGeometrySpheresDataNV( *reinterpret_cast<AccelerationStructureGeometrySpheresDataNV const *>( &rhs ) )
{}
AccelerationStructureGeometrySpheresDataNV & operator=( AccelerationStructureGeometrySpheresDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
AccelerationStructureGeometrySpheresDataNV & operator=( VkAccelerationStructureGeometrySpheresDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometrySpheresDataNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV & setVertexFormat( VULKAN_HPP_NAMESPACE::Format vertexFormat_ ) VULKAN_HPP_NOEXCEPT
{
vertexFormat = vertexFormat_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV & setVertexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & vertexData_ ) VULKAN_HPP_NOEXCEPT
{
vertexData = vertexData_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV & setVertexStride( VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ ) VULKAN_HPP_NOEXCEPT
{
vertexStride = vertexStride_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV & setRadiusFormat( VULKAN_HPP_NAMESPACE::Format radiusFormat_ ) VULKAN_HPP_NOEXCEPT
{
radiusFormat = radiusFormat_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV & setRadiusData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & radiusData_ ) VULKAN_HPP_NOEXCEPT
{
radiusData = radiusData_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV & setRadiusStride( VULKAN_HPP_NAMESPACE::DeviceSize radiusStride_ ) VULKAN_HPP_NOEXCEPT
{
radiusStride = radiusStride_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT
{
indexType = indexType_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV & setIndexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & indexData_ ) VULKAN_HPP_NOEXCEPT
{
indexData = indexData_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV & setIndexStride( VULKAN_HPP_NAMESPACE::DeviceSize indexStride_ ) VULKAN_HPP_NOEXCEPT
{
indexStride = indexStride_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkAccelerationStructureGeometrySpheresDataNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAccelerationStructureGeometrySpheresDataNV*>( this );
}
operator VkAccelerationStructureGeometrySpheresDataNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAccelerationStructureGeometrySpheresDataNV*>( this );
}
operator VkAccelerationStructureGeometrySpheresDataNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkAccelerationStructureGeometrySpheresDataNV*>( this );
}
operator VkAccelerationStructureGeometrySpheresDataNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkAccelerationStructureGeometrySpheresDataNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::IndexType const &, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, vertexFormat, vertexData, vertexStride, radiusFormat, radiusData, radiusStride, indexType, indexData, indexStride );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometrySpheresDataNV;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Format vertexFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData = {};
VULKAN_HPP_NAMESPACE::DeviceSize vertexStride = {};
VULKAN_HPP_NAMESPACE::Format radiusFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR radiusData = {};
VULKAN_HPP_NAMESPACE::DeviceSize radiusStride = {};
VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16;
VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexData = {};
VULKAN_HPP_NAMESPACE::DeviceSize indexStride = {};
};
template <>
struct CppType<StructureType, StructureType::eAccelerationStructureGeometrySpheresDataNV>
{
using Type = AccelerationStructureGeometrySpheresDataNV;
};
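// note: this mirrors AccelerationStructureGeometryLinearSweptSpheresDataNV above minus the
// indexing and end-cap modes; each position plus its radius entry defines one sphere primitive.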
// wrapper struct for struct VkTransformMatrixKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTransformMatrixKHR.html
struct TransformMatrixKHR
{
using NativeType = VkTransformMatrixKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 TransformMatrixKHR(std::array<std::array<float,4>,3> const & matrix_ = {}) VULKAN_HPP_NOEXCEPT
: matrix{ matrix_ }
{}
VULKAN_HPP_CONSTEXPR_14 TransformMatrixKHR( TransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
TransformMatrixKHR( VkTransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: TransformMatrixKHR( *reinterpret_cast<TransformMatrixKHR const *>( &rhs ) )
{}
TransformMatrixKHR & operator=( TransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
TransformMatrixKHR & operator=( VkTransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TransformMatrixKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 TransformMatrixKHR & setMatrix( std::array<std::array<float,4>,3> matrix_ ) VULKAN_HPP_NOEXCEPT
{
matrix = matrix_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkTransformMatrixKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkTransformMatrixKHR*>( this );
}
operator VkTransformMatrixKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkTransformMatrixKHR*>( this );
}
operator VkTransformMatrixKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkTransformMatrixKHR*>( this );
}
operator VkTransformMatrixKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkTransformMatrixKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::ArrayWrapper2D<float, 3, 4> const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( matrix );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( TransformMatrixKHR const & ) const = default;
#else
bool operator==( TransformMatrixKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( matrix == rhs.matrix );
#endif
}
bool operator!=( TransformMatrixKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::ArrayWrapper2D<float, 3, 4> matrix = {};
};
using TransformMatrixNV = TransformMatrixKHR;
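// usage sketch (illustrative only): TransformMatrixKHR is a row-major 3x4 matrix, i.e. the
// upper three rows of an affine 4x4 transform with the translation in the last column.
//
//   vk::TransformMatrixKHR identity{ std::array<std::array<float, 4>, 3>{ {
//     { { 1.0f, 0.0f, 0.0f, 0.0f } },
//     { { 0.0f, 1.0f, 0.0f, 0.0f } },
//     { { 0.0f, 0.0f, 1.0f, 0.0f } } } } };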
// wrapper struct for struct VkAccelerationStructureInstanceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureInstanceKHR.html
struct AccelerationStructureInstanceKHR
{
using NativeType = VkAccelerationStructureInstanceKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR(VULKAN_HPP_NAMESPACE::TransformMatrixKHR transform_ = {}, uint32_t instanceCustomIndex_ = {}, uint32_t mask_ = {}, uint32_t instanceShaderBindingTableRecordOffset_ = {}, VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ = {}, uint64_t accelerationStructureReference_ = {}) VULKAN_HPP_NOEXCEPT
: transform{ transform_ }, instanceCustomIndex{ instanceCustomIndex_ }, mask{ mask_ }, instanceShaderBindingTableRecordOffset{ instanceShaderBindingTableRecordOffset_ }, flags{ flags_ }, accelerationStructureReference{ accelerationStructureReference_ }
{}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR( AccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AccelerationStructureInstanceKHR( VkAccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: AccelerationStructureInstanceKHR( *reinterpret_cast<AccelerationStructureInstanceKHR const *>( &rhs ) )
{}
AccelerationStructureInstanceKHR & operator=( AccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
AccelerationStructureInstanceKHR & operator=( VkAccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR & setTransform( VULKAN_HPP_NAMESPACE::TransformMatrixKHR const & transform_ ) VULKAN_HPP_NOEXCEPT
{
transform = transform_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR & setInstanceCustomIndex( uint32_t instanceCustomIndex_ ) VULKAN_HPP_NOEXCEPT
{
instanceCustomIndex = instanceCustomIndex_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR & setMask( uint32_t mask_ ) VULKAN_HPP_NOEXCEPT
{
mask = mask_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR & setInstanceShaderBindingTableRecordOffset( uint32_t instanceShaderBindingTableRecordOffset_ ) VULKAN_HPP_NOEXCEPT
{
instanceShaderBindingTableRecordOffset = instanceShaderBindingTableRecordOffset_;
return *this;
}
AccelerationStructureInstanceKHR & setFlags( VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = *reinterpret_cast<VkGeometryInstanceFlagsKHR*>(&flags_);
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR & setAccelerationStructureReference( uint64_t accelerationStructureReference_ ) VULKAN_HPP_NOEXCEPT
{
accelerationStructureReference = accelerationStructureReference_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkAccelerationStructureInstanceKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAccelerationStructureInstanceKHR*>( this );
}
operator VkAccelerationStructureInstanceKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAccelerationStructureInstanceKHR*>( this );
}
operator VkAccelerationStructureInstanceKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkAccelerationStructureInstanceKHR*>( this );
}
operator VkAccelerationStructureInstanceKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkAccelerationStructureInstanceKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::TransformMatrixKHR const &, uint32_t const &, uint32_t const &, uint32_t const &, VkGeometryInstanceFlagsKHR const &, uint64_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( transform, instanceCustomIndex, mask, instanceShaderBindingTableRecordOffset, flags, accelerationStructureReference );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( AccelerationStructureInstanceKHR const & ) const = default;
#else
bool operator==( AccelerationStructureInstanceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( transform == rhs.transform )
&& ( instanceCustomIndex == rhs.instanceCustomIndex )
&& ( mask == rhs.mask )
&& ( instanceShaderBindingTableRecordOffset == rhs.instanceShaderBindingTableRecordOffset )
&& ( flags == rhs.flags )
&& ( accelerationStructureReference == rhs.accelerationStructureReference );
#endif
}
bool operator!=( AccelerationStructureInstanceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::TransformMatrixKHR transform = {};
uint32_t instanceCustomIndex : 24;
uint32_t mask : 8;
uint32_t instanceShaderBindingTableRecordOffset : 24;
VkGeometryInstanceFlagsKHR flags : 8;
uint64_t accelerationStructureReference = {};
};
using AccelerationStructureInstanceNV = AccelerationStructureInstanceKHR;
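  // Editorial usage sketch (illustrative comment, not generated API): populating an instance
  // for a top-level acceleration structure build. The default `vk` namespace alias is assumed,
  // and `transform3x4` and `blasDeviceAddress` are hypothetical values obtained elsewhere.
  // Note that instanceCustomIndex and instanceShaderBindingTableRecordOffset are 24-bit fields,
  // while mask and flags are 8-bit fields, so out-of-range values are silently truncated.
  //
  //   vk::AccelerationStructureInstanceKHR instance = vk::AccelerationStructureInstanceKHR{}
  //     .setTransform( transform3x4 )
  //     .setInstanceCustomIndex( 0 )
  //     .setMask( 0xFF )
  //     .setInstanceShaderBindingTableRecordOffset( 0 )
  //     .setFlags( vk::GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable )
  //     .setAccelerationStructureReference( blasDeviceAddress );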
// wrapper struct for struct VkAccelerationStructureMatrixMotionInstanceNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureMatrixMotionInstanceNV.html
struct AccelerationStructureMatrixMotionInstanceNV
{
using NativeType = VkAccelerationStructureMatrixMotionInstanceNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV(VULKAN_HPP_NAMESPACE::TransformMatrixKHR transformT0_ = {}, VULKAN_HPP_NAMESPACE::TransformMatrixKHR transformT1_ = {}, uint32_t instanceCustomIndex_ = {}, uint32_t mask_ = {}, uint32_t instanceShaderBindingTableRecordOffset_ = {}, VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ = {}, uint64_t accelerationStructureReference_ = {}) VULKAN_HPP_NOEXCEPT
: transformT0{ transformT0_ }, transformT1{ transformT1_ }, instanceCustomIndex{ instanceCustomIndex_ }, mask{ mask_ }, instanceShaderBindingTableRecordOffset{ instanceShaderBindingTableRecordOffset_ }, flags{ flags_ }, accelerationStructureReference{ accelerationStructureReference_ }
{}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV( AccelerationStructureMatrixMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AccelerationStructureMatrixMotionInstanceNV( VkAccelerationStructureMatrixMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT
: AccelerationStructureMatrixMotionInstanceNV( *reinterpret_cast<AccelerationStructureMatrixMotionInstanceNV const *>( &rhs ) )
{}
AccelerationStructureMatrixMotionInstanceNV & operator=( AccelerationStructureMatrixMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
AccelerationStructureMatrixMotionInstanceNV & operator=( VkAccelerationStructureMatrixMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV & setTransformT0( VULKAN_HPP_NAMESPACE::TransformMatrixKHR const & transformT0_ ) VULKAN_HPP_NOEXCEPT
{
transformT0 = transformT0_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV & setTransformT1( VULKAN_HPP_NAMESPACE::TransformMatrixKHR const & transformT1_ ) VULKAN_HPP_NOEXCEPT
{
transformT1 = transformT1_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV & setInstanceCustomIndex( uint32_t instanceCustomIndex_ ) VULKAN_HPP_NOEXCEPT
{
instanceCustomIndex = instanceCustomIndex_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV & setMask( uint32_t mask_ ) VULKAN_HPP_NOEXCEPT
{
mask = mask_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV & setInstanceShaderBindingTableRecordOffset( uint32_t instanceShaderBindingTableRecordOffset_ ) VULKAN_HPP_NOEXCEPT
{
instanceShaderBindingTableRecordOffset = instanceShaderBindingTableRecordOffset_;
return *this;
}
AccelerationStructureMatrixMotionInstanceNV & setFlags( VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = *reinterpret_cast<VkGeometryInstanceFlagsKHR*>(&flags_);
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV & setAccelerationStructureReference( uint64_t accelerationStructureReference_ ) VULKAN_HPP_NOEXCEPT
{
accelerationStructureReference = accelerationStructureReference_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkAccelerationStructureMatrixMotionInstanceNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAccelerationStructureMatrixMotionInstanceNV*>( this );
}
operator VkAccelerationStructureMatrixMotionInstanceNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAccelerationStructureMatrixMotionInstanceNV*>( this );
}
operator VkAccelerationStructureMatrixMotionInstanceNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkAccelerationStructureMatrixMotionInstanceNV*>( this );
}
operator VkAccelerationStructureMatrixMotionInstanceNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkAccelerationStructureMatrixMotionInstanceNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::TransformMatrixKHR const &, VULKAN_HPP_NAMESPACE::TransformMatrixKHR const &, uint32_t const &, uint32_t const &, uint32_t const &, VkGeometryInstanceFlagsKHR const &, uint64_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( transformT0, transformT1, instanceCustomIndex, mask, instanceShaderBindingTableRecordOffset, flags, accelerationStructureReference );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( AccelerationStructureMatrixMotionInstanceNV const & ) const = default;
#else
bool operator==( AccelerationStructureMatrixMotionInstanceNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( transformT0 == rhs.transformT0 )
&& ( transformT1 == rhs.transformT1 )
&& ( instanceCustomIndex == rhs.instanceCustomIndex )
&& ( mask == rhs.mask )
&& ( instanceShaderBindingTableRecordOffset == rhs.instanceShaderBindingTableRecordOffset )
&& ( flags == rhs.flags )
&& ( accelerationStructureReference == rhs.accelerationStructureReference );
#endif
}
bool operator!=( AccelerationStructureMatrixMotionInstanceNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::TransformMatrixKHR transformT0 = {};
VULKAN_HPP_NAMESPACE::TransformMatrixKHR transformT1 = {};
uint32_t instanceCustomIndex : 24;
uint32_t mask : 8;
uint32_t instanceShaderBindingTableRecordOffset : 24;
VkGeometryInstanceFlagsKHR flags : 8;
uint64_t accelerationStructureReference = {};
};
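  // Editorial usage sketch (illustrative comment): a matrix-motion instance is built like a
  // static instance, but supplies two transforms that the implementation interpolates between
  // time 0.0 (transformT0) and time 1.0 (transformT1); `t0`, `t1` and `blasDeviceAddress` are
  // hypothetical values.
  //
  //   vk::AccelerationStructureMatrixMotionInstanceNV motionInstance = vk::AccelerationStructureMatrixMotionInstanceNV{}
  //     .setTransformT0( t0 )
  //     .setTransformT1( t1 )
  //     .setMask( 0xFF )
  //     .setAccelerationStructureReference( blasDeviceAddress );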
// wrapper struct for struct VkAccelerationStructureMemoryRequirementsInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureMemoryRequirementsInfoNV.html
struct AccelerationStructureMemoryRequirementsInfoNV
{
using NativeType = VkAccelerationStructureMemoryRequirementsInfoNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureMemoryRequirementsInfoNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR AccelerationStructureMemoryRequirementsInfoNV(VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV::eObject, VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, type{ type_ }, accelerationStructure{ accelerationStructure_ }
{}
VULKAN_HPP_CONSTEXPR AccelerationStructureMemoryRequirementsInfoNV( AccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AccelerationStructureMemoryRequirementsInfoNV( VkAccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: AccelerationStructureMemoryRequirementsInfoNV( *reinterpret_cast<AccelerationStructureMemoryRequirementsInfoNV const *>( &rhs ) )
{}
AccelerationStructureMemoryRequirementsInfoNV & operator=( AccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
AccelerationStructureMemoryRequirementsInfoNV & operator=( VkAccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMemoryRequirementsInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMemoryRequirementsInfoNV & setType( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type_ ) VULKAN_HPP_NOEXCEPT
{
type = type_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMemoryRequirementsInfoNV & setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
{
accelerationStructure = accelerationStructure_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkAccelerationStructureMemoryRequirementsInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV*>( this );
}
operator VkAccelerationStructureMemoryRequirementsInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAccelerationStructureMemoryRequirementsInfoNV*>( this );
}
operator VkAccelerationStructureMemoryRequirementsInfoNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV*>( this );
}
operator VkAccelerationStructureMemoryRequirementsInfoNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkAccelerationStructureMemoryRequirementsInfoNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV const &, VULKAN_HPP_NAMESPACE::AccelerationStructureNV const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, type, accelerationStructure );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( AccelerationStructureMemoryRequirementsInfoNV const & ) const = default;
#else
bool operator==( AccelerationStructureMemoryRequirementsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( type == rhs.type )
&& ( accelerationStructure == rhs.accelerationStructure );
#endif
}
bool operator!=( AccelerationStructureMemoryRequirementsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureMemoryRequirementsInfoNV;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type = VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV::eObject;
VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure = {};
};
template <>
struct CppType<StructureType, StructureType::eAccelerationStructureMemoryRequirementsInfoNV>
{
using Type = AccelerationStructureMemoryRequirementsInfoNV;
};
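  // Editorial usage sketch (illustrative comment): querying the memory requirements of an
  // NV ray-tracing acceleration structure; `device` and `accelerationStructure` are
  // hypothetical handles created elsewhere.
  //
  //   vk::AccelerationStructureMemoryRequirementsInfoNV info{ vk::AccelerationStructureMemoryRequirementsTypeNV::eObject, accelerationStructure };
  //   vk::MemoryRequirements2KHR requirements = device.getAccelerationStructureMemoryRequirementsNV( info );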
// wrapper struct for struct VkAccelerationStructureMotionInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureMotionInfoNV.html
struct AccelerationStructureMotionInfoNV
{
using NativeType = VkAccelerationStructureMotionInfoNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureMotionInfoNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR AccelerationStructureMotionInfoNV(uint32_t maxInstances_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoFlagsNV flags_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, maxInstances{ maxInstances_ }, flags{ flags_ }
{}
VULKAN_HPP_CONSTEXPR AccelerationStructureMotionInfoNV( AccelerationStructureMotionInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AccelerationStructureMotionInfoNV( VkAccelerationStructureMotionInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: AccelerationStructureMotionInfoNV( *reinterpret_cast<AccelerationStructureMotionInfoNV const *>( &rhs ) )
{}
AccelerationStructureMotionInfoNV & operator=( AccelerationStructureMotionInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
AccelerationStructureMotionInfoNV & operator=( VkAccelerationStructureMotionInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInfoNV & setMaxInstances( uint32_t maxInstances_ ) VULKAN_HPP_NOEXCEPT
{
maxInstances = maxInstances_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInfoNV & setFlags( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkAccelerationStructureMotionInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAccelerationStructureMotionInfoNV*>( this );
}
operator VkAccelerationStructureMotionInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAccelerationStructureMotionInfoNV*>( this );
}
operator VkAccelerationStructureMotionInfoNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkAccelerationStructureMotionInfoNV*>( this );
}
operator VkAccelerationStructureMotionInfoNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkAccelerationStructureMotionInfoNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoFlagsNV const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, maxInstances, flags );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( AccelerationStructureMotionInfoNV const & ) const = default;
#else
bool operator==( AccelerationStructureMotionInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( maxInstances == rhs.maxInstances )
&& ( flags == rhs.flags );
#endif
}
bool operator!=( AccelerationStructureMotionInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureMotionInfoNV;
const void * pNext = {};
uint32_t maxInstances = {};
VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoFlagsNV flags = {};
};
template <>
struct CppType<StructureType, StructureType::eAccelerationStructureMotionInfoNV>
{
using Type = AccelerationStructureMotionInfoNV;
};
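  // Editorial usage sketch (illustrative comment): this struct extends
  // AccelerationStructureCreateInfoKHR through its pNext chain when creating a motion
  // top-level acceleration structure; `maxInstanceCount` is a hypothetical value.
  //
  //   vk::AccelerationStructureMotionInfoNV motionInfo = vk::AccelerationStructureMotionInfoNV{}.setMaxInstances( maxInstanceCount );
  //   vk::AccelerationStructureCreateInfoKHR createInfo{};
  //   createInfo.setCreateFlags( vk::AccelerationStructureCreateFlagBitsKHR::eMotionNV )
  //             .setPNext( &motionInfo );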
// wrapper struct for struct VkSRTDataNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSRTDataNV.html
struct SRTDataNV
{
using NativeType = VkSRTDataNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SRTDataNV(float sx_ = {}, float a_ = {}, float b_ = {}, float pvx_ = {}, float sy_ = {}, float c_ = {}, float pvy_ = {}, float sz_ = {}, float pvz_ = {}, float qx_ = {}, float qy_ = {}, float qz_ = {}, float qw_ = {}, float tx_ = {}, float ty_ = {}, float tz_ = {}) VULKAN_HPP_NOEXCEPT
: sx{ sx_ }, a{ a_ }, b{ b_ }, pvx{ pvx_ }, sy{ sy_ }, c{ c_ }, pvy{ pvy_ }, sz{ sz_ }, pvz{ pvz_ }, qx{ qx_ }, qy{ qy_ }, qz{ qz_ }, qw{ qw_ }, tx{ tx_ }, ty{ ty_ }, tz{ tz_ }
{}
VULKAN_HPP_CONSTEXPR SRTDataNV( SRTDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SRTDataNV( VkSRTDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
: SRTDataNV( *reinterpret_cast<SRTDataNV const *>( &rhs ) )
{}
SRTDataNV & operator=( SRTDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SRTDataNV & operator=( VkSRTDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SRTDataNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setSx( float sx_ ) VULKAN_HPP_NOEXCEPT
{
sx = sx_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setA( float a_ ) VULKAN_HPP_NOEXCEPT
{
a = a_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setB( float b_ ) VULKAN_HPP_NOEXCEPT
{
b = b_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setPvx( float pvx_ ) VULKAN_HPP_NOEXCEPT
{
pvx = pvx_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setSy( float sy_ ) VULKAN_HPP_NOEXCEPT
{
sy = sy_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setC( float c_ ) VULKAN_HPP_NOEXCEPT
{
c = c_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setPvy( float pvy_ ) VULKAN_HPP_NOEXCEPT
{
pvy = pvy_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setSz( float sz_ ) VULKAN_HPP_NOEXCEPT
{
sz = sz_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setPvz( float pvz_ ) VULKAN_HPP_NOEXCEPT
{
pvz = pvz_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setQx( float qx_ ) VULKAN_HPP_NOEXCEPT
{
qx = qx_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setQy( float qy_ ) VULKAN_HPP_NOEXCEPT
{
qy = qy_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setQz( float qz_ ) VULKAN_HPP_NOEXCEPT
{
qz = qz_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setQw( float qw_ ) VULKAN_HPP_NOEXCEPT
{
qw = qw_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setTx( float tx_ ) VULKAN_HPP_NOEXCEPT
{
tx = tx_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setTy( float ty_ ) VULKAN_HPP_NOEXCEPT
{
ty = ty_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setTz( float tz_ ) VULKAN_HPP_NOEXCEPT
{
tz = tz_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkSRTDataNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSRTDataNV*>( this );
}
operator VkSRTDataNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSRTDataNV*>( this );
}
operator VkSRTDataNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSRTDataNV*>( this );
}
operator VkSRTDataNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSRTDataNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<float const &, float const &, float const &, float const &, float const &, float const &, float const &, float const &, float const &, float const &, float const &, float const &, float const &, float const &, float const &, float const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sx, a, b, pvx, sy, c, pvy, sz, pvz, qx, qy, qz, qw, tx, ty, tz );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SRTDataNV const & ) const = default;
#else
bool operator==( SRTDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sx == rhs.sx )
&& ( a == rhs.a )
&& ( b == rhs.b )
&& ( pvx == rhs.pvx )
&& ( sy == rhs.sy )
&& ( c == rhs.c )
&& ( pvy == rhs.pvy )
&& ( sz == rhs.sz )
&& ( pvz == rhs.pvz )
&& ( qx == rhs.qx )
&& ( qy == rhs.qy )
&& ( qz == rhs.qz )
&& ( qw == rhs.qw )
&& ( tx == rhs.tx )
&& ( ty == rhs.ty )
&& ( tz == rhs.tz );
#endif
}
bool operator!=( SRTDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
float sx = {};
float a = {};
float b = {};
float pvx = {};
float sy = {};
float c = {};
float pvy = {};
float sz = {};
float pvz = {};
float qx = {};
float qy = {};
float qz = {};
float qw = {};
float tx = {};
float ty = {};
float tz = {};
};
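  // Editorial note (illustrative comment): SRTDataNV encodes a decomposed transform: scale
  // and shear components (sx, a, b, sy, c, sz), a pivot point (pvx, pvy, pvz), a rotation
  // quaternion (qx, qy, qz, qw) and a translation (tx, ty, tz). The identity transform, for
  // example, needs only the unit scales and the identity quaternion:
  //
  //   vk::SRTDataNV identity{};
  //   identity.setSx( 1.0f ).setSy( 1.0f ).setSz( 1.0f ).setQw( 1.0f );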
// wrapper struct for struct VkAccelerationStructureSRTMotionInstanceNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureSRTMotionInstanceNV.html
struct AccelerationStructureSRTMotionInstanceNV
{
using NativeType = VkAccelerationStructureSRTMotionInstanceNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV(VULKAN_HPP_NAMESPACE::SRTDataNV transformT0_ = {}, VULKAN_HPP_NAMESPACE::SRTDataNV transformT1_ = {}, uint32_t instanceCustomIndex_ = {}, uint32_t mask_ = {}, uint32_t instanceShaderBindingTableRecordOffset_ = {}, VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ = {}, uint64_t accelerationStructureReference_ = {}) VULKAN_HPP_NOEXCEPT
: transformT0{ transformT0_ }, transformT1{ transformT1_ }, instanceCustomIndex{ instanceCustomIndex_ }, mask{ mask_ }, instanceShaderBindingTableRecordOffset{ instanceShaderBindingTableRecordOffset_ }, flags{ flags_ }, accelerationStructureReference{ accelerationStructureReference_ }
{}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV( AccelerationStructureSRTMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AccelerationStructureSRTMotionInstanceNV( VkAccelerationStructureSRTMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT
: AccelerationStructureSRTMotionInstanceNV( *reinterpret_cast<AccelerationStructureSRTMotionInstanceNV const *>( &rhs ) )
{}
AccelerationStructureSRTMotionInstanceNV & operator=( AccelerationStructureSRTMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
AccelerationStructureSRTMotionInstanceNV & operator=( VkAccelerationStructureSRTMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV & setTransformT0( VULKAN_HPP_NAMESPACE::SRTDataNV const & transformT0_ ) VULKAN_HPP_NOEXCEPT
{
transformT0 = transformT0_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV & setTransformT1( VULKAN_HPP_NAMESPACE::SRTDataNV const & transformT1_ ) VULKAN_HPP_NOEXCEPT
{
transformT1 = transformT1_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV & setInstanceCustomIndex( uint32_t instanceCustomIndex_ ) VULKAN_HPP_NOEXCEPT
{
instanceCustomIndex = instanceCustomIndex_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV & setMask( uint32_t mask_ ) VULKAN_HPP_NOEXCEPT
{
mask = mask_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV & setInstanceShaderBindingTableRecordOffset( uint32_t instanceShaderBindingTableRecordOffset_ ) VULKAN_HPP_NOEXCEPT
{
instanceShaderBindingTableRecordOffset = instanceShaderBindingTableRecordOffset_;
return *this;
}
AccelerationStructureSRTMotionInstanceNV & setFlags( VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = *reinterpret_cast<VkGeometryInstanceFlagsKHR*>(&flags_);
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV & setAccelerationStructureReference( uint64_t accelerationStructureReference_ ) VULKAN_HPP_NOEXCEPT
{
accelerationStructureReference = accelerationStructureReference_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkAccelerationStructureSRTMotionInstanceNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAccelerationStructureSRTMotionInstanceNV*>( this );
}
operator VkAccelerationStructureSRTMotionInstanceNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAccelerationStructureSRTMotionInstanceNV*>( this );
}
operator VkAccelerationStructureSRTMotionInstanceNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkAccelerationStructureSRTMotionInstanceNV*>( this );
}
operator VkAccelerationStructureSRTMotionInstanceNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkAccelerationStructureSRTMotionInstanceNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::SRTDataNV const &, VULKAN_HPP_NAMESPACE::SRTDataNV const &, uint32_t const &, uint32_t const &, uint32_t const &, VkGeometryInstanceFlagsKHR const &, uint64_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( transformT0, transformT1, instanceCustomIndex, mask, instanceShaderBindingTableRecordOffset, flags, accelerationStructureReference );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( AccelerationStructureSRTMotionInstanceNV const & ) const = default;
#else
bool operator==( AccelerationStructureSRTMotionInstanceNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( transformT0 == rhs.transformT0 )
&& ( transformT1 == rhs.transformT1 )
&& ( instanceCustomIndex == rhs.instanceCustomIndex )
&& ( mask == rhs.mask )
&& ( instanceShaderBindingTableRecordOffset == rhs.instanceShaderBindingTableRecordOffset )
&& ( flags == rhs.flags )
&& ( accelerationStructureReference == rhs.accelerationStructureReference );
#endif
}
bool operator!=( AccelerationStructureSRTMotionInstanceNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::SRTDataNV transformT0 = {};
VULKAN_HPP_NAMESPACE::SRTDataNV transformT1 = {};
uint32_t instanceCustomIndex : 24;
uint32_t mask : 8;
uint32_t instanceShaderBindingTableRecordOffset : 24;
VkGeometryInstanceFlagsKHR flags : 8;
uint64_t accelerationStructureReference = {};
};
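  // Editorial usage sketch (illustrative comment): an SRT-motion instance interpolates between
  // two SRT transforms; `srt0`, `srt1` and `blasDeviceAddress` are hypothetical values.
  //
  //   vk::AccelerationStructureSRTMotionInstanceNV srtInstance = vk::AccelerationStructureSRTMotionInstanceNV{}
  //     .setTransformT0( srt0 )
  //     .setTransformT1( srt1 )
  //     .setMask( 0xFF )
  //     .setAccelerationStructureReference( blasDeviceAddress );
  // wrapper union for union VkAccelerationStructureMotionInstanceDataNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureMotionInstanceDataNV.html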
union AccelerationStructureMotionInstanceDataNV
{
using NativeType = VkAccelerationStructureMotionInstanceDataNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV( VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR staticInstance_ = {} )
: staticInstance( staticInstance_ )
{}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV( VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV matrixMotionInstance_ )
: matrixMotionInstance( matrixMotionInstance_ )
{}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV( VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV srtMotionInstance_ )
: srtMotionInstance( srtMotionInstance_ )
{}
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS )
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV & setStaticInstance( VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR const & staticInstance_ ) VULKAN_HPP_NOEXCEPT
{
staticInstance = staticInstance_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV & setMatrixMotionInstance( VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV const & matrixMotionInstance_ ) VULKAN_HPP_NOEXCEPT
{
matrixMotionInstance = matrixMotionInstance_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV & setSrtMotionInstance( VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV const & srtMotionInstance_ ) VULKAN_HPP_NOEXCEPT
{
srtMotionInstance = srtMotionInstance_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkAccelerationStructureMotionInstanceDataNV const &() const
{
return *reinterpret_cast<const VkAccelerationStructureMotionInstanceDataNV*>( this );
}
operator VkAccelerationStructureMotionInstanceDataNV &()
{
return *reinterpret_cast<VkAccelerationStructureMotionInstanceDataNV*>( this );
}
#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR staticInstance;
VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV matrixMotionInstance;
VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV srtMotionInstance;
#else
VkAccelerationStructureInstanceKHR staticInstance;
VkAccelerationStructureMatrixMotionInstanceNV matrixMotionInstance;
VkAccelerationStructureSRTMotionInstanceNV srtMotionInstance;
#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
};
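  // Editorial note (illustrative comment): only one union member may be active at a time, and
  // it must match the `type` field of the enclosing AccelerationStructureMotionInstanceNV
  // (eStatic -> staticInstance, eMatrixMotion -> matrixMotionInstance, eSrtMotion -> srtMotionInstance).
  //
  //   vk::AccelerationStructureMotionInstanceDataNV data{};
  //   data.setSrtMotionInstance( srtInstance );  // `srtInstance` as in the sketch above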
// wrapper struct for struct VkAccelerationStructureMotionInstanceNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureMotionInstanceNV.html
struct AccelerationStructureMotionInstanceNV
{
using NativeType = VkAccelerationStructureMotionInstanceNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceNV(VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV::eStatic, VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceFlagsNV flags_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceDataNV data_ = {}) VULKAN_HPP_NOEXCEPT
: type{ type_ }, flags{ flags_ }, data{ data_ }
{}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceNV( AccelerationStructureMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AccelerationStructureMotionInstanceNV( VkAccelerationStructureMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT
: AccelerationStructureMotionInstanceNV( *reinterpret_cast<AccelerationStructureMotionInstanceNV const *>( &rhs ) )
{}
AccelerationStructureMotionInstanceNV & operator=( AccelerationStructureMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
AccelerationStructureMotionInstanceNV & operator=( VkAccelerationStructureMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceNV & setType( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV type_ ) VULKAN_HPP_NOEXCEPT
{
type = type_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceNV & setFlags( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceNV & setData( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceDataNV const & data_ ) VULKAN_HPP_NOEXCEPT
{
data = data_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkAccelerationStructureMotionInstanceNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAccelerationStructureMotionInstanceNV*>( this );
}
operator VkAccelerationStructureMotionInstanceNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAccelerationStructureMotionInstanceNV*>( this );
}
operator VkAccelerationStructureMotionInstanceNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkAccelerationStructureMotionInstanceNV*>( this );
}
operator VkAccelerationStructureMotionInstanceNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkAccelerationStructureMotionInstanceNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV const &, VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceFlagsNV const &, VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceDataNV const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( type, flags, data );
}
#endif
public:
VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV type = VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV::eStatic;
VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceFlagsNV flags = {};
VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceDataNV data = {};
};
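  // Editorial usage sketch (illustrative comment): pairing the discriminating `type` with the
  // matching union member; `srtInstance` is a hypothetical value.
  //
  //   vk::AccelerationStructureMotionInstanceNV motionInstance = vk::AccelerationStructureMotionInstanceNV{}
  //     .setType( vk::AccelerationStructureMotionInstanceTypeNV::eSrtMotion )
  //     .setData( vk::AccelerationStructureMotionInstanceDataNV{}.setSrtMotionInstance( srtInstance ) );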
// wrapper struct for struct VkMicromapUsageEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMicromapUsageEXT.html
struct MicromapUsageEXT
{
using NativeType = VkMicromapUsageEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR MicromapUsageEXT(uint32_t count_ = {}, uint32_t subdivisionLevel_ = {}, uint32_t format_ = {}) VULKAN_HPP_NOEXCEPT
: count{ count_ }, subdivisionLevel{ subdivisionLevel_ }, format{ format_ }
{}
VULKAN_HPP_CONSTEXPR MicromapUsageEXT( MicromapUsageEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MicromapUsageEXT( VkMicromapUsageEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: MicromapUsageEXT( *reinterpret_cast<MicromapUsageEXT const *>( &rhs ) )
{}
MicromapUsageEXT & operator=( MicromapUsageEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
MicromapUsageEXT & operator=( VkMicromapUsageEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MicromapUsageEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 MicromapUsageEXT & setCount( uint32_t count_ ) VULKAN_HPP_NOEXCEPT
{
count = count_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MicromapUsageEXT & setSubdivisionLevel( uint32_t subdivisionLevel_ ) VULKAN_HPP_NOEXCEPT
{
subdivisionLevel = subdivisionLevel_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MicromapUsageEXT & setFormat( uint32_t format_ ) VULKAN_HPP_NOEXCEPT
{
format = format_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkMicromapUsageEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMicromapUsageEXT*>( this );
}
operator VkMicromapUsageEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMicromapUsageEXT*>( this );
}
operator VkMicromapUsageEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkMicromapUsageEXT*>( this );
}
operator VkMicromapUsageEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkMicromapUsageEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<uint32_t const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( count, subdivisionLevel, format );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( MicromapUsageEXT const & ) const = default;
#else
bool operator==( MicromapUsageEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( count == rhs.count )
&& ( subdivisionLevel == rhs.subdivisionLevel )
&& ( format == rhs.format );
#endif
}
bool operator!=( MicromapUsageEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
uint32_t count = {};
uint32_t subdivisionLevel = {};
uint32_t format = {};
};
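  // Editorial usage sketch (illustrative comment): each usage entry counts the triangles that
  // share one (subdivisionLevel, format) combination. `format` is a raw uint32_t so that both
  // opacity and displacement micromap format enums fit; `triangleCount` is hypothetical.
  //
  //   vk::MicromapUsageEXT usage = vk::MicromapUsageEXT{}
  //     .setCount( triangleCount )
  //     .setSubdivisionLevel( 3 )
  //     .setFormat( static_cast<uint32_t>( vk::OpacityMicromapFormatEXT::e4State ) );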
#if defined( VK_ENABLE_BETA_EXTENSIONS )
// wrapper struct for struct VkAccelerationStructureTrianglesDisplacementMicromapNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureTrianglesDisplacementMicromapNV.html
struct AccelerationStructureTrianglesDisplacementMicromapNV
{
using NativeType = VkAccelerationStructureTrianglesDisplacementMicromapNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureTrianglesDisplacementMicromapNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV(VULKAN_HPP_NAMESPACE::Format displacementBiasAndScaleFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::Format displacementVectorFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR displacementBiasAndScaleBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize displacementBiasAndScaleStride_ = {}, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR displacementVectorBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize displacementVectorStride_ = {}, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR displacedMicromapPrimitiveFlags_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize displacedMicromapPrimitiveFlagsStride_ = {}, VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize indexStride_ = {}, uint32_t baseTriangle_ = {}, uint32_t usageCountsCount_ = {}, const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * pUsageCounts_ = {}, const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * ppUsageCounts_ = {}, VULKAN_HPP_NAMESPACE::MicromapEXT micromap_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, displacementBiasAndScaleFormat{ displacementBiasAndScaleFormat_ }, displacementVectorFormat{ displacementVectorFormat_ }, displacementBiasAndScaleBuffer{ displacementBiasAndScaleBuffer_ }, displacementBiasAndScaleStride{ displacementBiasAndScaleStride_ }, displacementVectorBuffer{ displacementVectorBuffer_ }, displacementVectorStride{ displacementVectorStride_ }, displacedMicromapPrimitiveFlags{ displacedMicromapPrimitiveFlags_ }, displacedMicromapPrimitiveFlagsStride{ displacedMicromapPrimitiveFlagsStride_ }, indexType{ indexType_ }, indexBuffer{ indexBuffer_ }, indexStride{ indexStride_ }, baseTriangle{ baseTriangle_ }, usageCountsCount{ usageCountsCount_ }, pUsageCounts{ pUsageCounts_ }, ppUsageCounts{ ppUsageCounts_ }, micromap{ micromap_ }
{}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV( AccelerationStructureTrianglesDisplacementMicromapNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AccelerationStructureTrianglesDisplacementMicromapNV( VkAccelerationStructureTrianglesDisplacementMicromapNV const & rhs ) VULKAN_HPP_NOEXCEPT
: AccelerationStructureTrianglesDisplacementMicromapNV( *reinterpret_cast<AccelerationStructureTrianglesDisplacementMicromapNV const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
AccelerationStructureTrianglesDisplacementMicromapNV( VULKAN_HPP_NAMESPACE::Format displacementBiasAndScaleFormat_, VULKAN_HPP_NAMESPACE::Format displacementVectorFormat_, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR displacementBiasAndScaleBuffer_, VULKAN_HPP_NAMESPACE::DeviceSize displacementBiasAndScaleStride_, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR displacementVectorBuffer_, VULKAN_HPP_NAMESPACE::DeviceSize displacementVectorStride_, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR displacedMicromapPrimitiveFlags_, VULKAN_HPP_NAMESPACE::DeviceSize displacedMicromapPrimitiveFlagsStride_, VULKAN_HPP_NAMESPACE::IndexType indexType_, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexBuffer_, VULKAN_HPP_NAMESPACE::DeviceSize indexStride_, uint32_t baseTriangle_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MicromapUsageEXT> const & usageCounts_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const> const & pUsageCounts_ = {}, VULKAN_HPP_NAMESPACE::MicromapEXT micromap_ = {}, void * pNext_ = nullptr )
: pNext( pNext_ ), displacementBiasAndScaleFormat( displacementBiasAndScaleFormat_ ), displacementVectorFormat( displacementVectorFormat_ ), displacementBiasAndScaleBuffer( displacementBiasAndScaleBuffer_ ), displacementBiasAndScaleStride( displacementBiasAndScaleStride_ ), displacementVectorBuffer( displacementVectorBuffer_ ), displacementVectorStride( displacementVectorStride_ ), displacedMicromapPrimitiveFlags( displacedMicromapPrimitiveFlags_ ), displacedMicromapPrimitiveFlagsStride( displacedMicromapPrimitiveFlagsStride_ ), indexType( indexType_ ), indexBuffer( indexBuffer_ ), indexStride( indexStride_ ), baseTriangle( baseTriangle_ ), usageCountsCount( static_cast<uint32_t>( !usageCounts_.empty() ? usageCounts_.size() : pUsageCounts_.size() ) ), pUsageCounts( usageCounts_.data() ), ppUsageCounts( pUsageCounts_.data() ), micromap( micromap_ )
{
#ifdef VULKAN_HPP_NO_EXCEPTIONS
VULKAN_HPP_ASSERT( ( !usageCounts_.empty() + !pUsageCounts_.empty() ) <= 1 );
#else
if ( 1 < ( !usageCounts_.empty() + !pUsageCounts_.empty() ) )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING"::AccelerationStructureTrianglesDisplacementMicromapNV::AccelerationStructureTrianglesDisplacementMicromapNV: 1 < ( !usageCounts_.empty() + !pUsageCounts_.empty() )" );
}
#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
AccelerationStructureTrianglesDisplacementMicromapNV & operator=( AccelerationStructureTrianglesDisplacementMicromapNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
AccelerationStructureTrianglesDisplacementMicromapNV & operator=( VkAccelerationStructureTrianglesDisplacementMicromapNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureTrianglesDisplacementMicromapNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & setDisplacementBiasAndScaleFormat( VULKAN_HPP_NAMESPACE::Format displacementBiasAndScaleFormat_ ) VULKAN_HPP_NOEXCEPT
{
displacementBiasAndScaleFormat = displacementBiasAndScaleFormat_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & setDisplacementVectorFormat( VULKAN_HPP_NAMESPACE::Format displacementVectorFormat_ ) VULKAN_HPP_NOEXCEPT
{
displacementVectorFormat = displacementVectorFormat_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & setDisplacementBiasAndScaleBuffer( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & displacementBiasAndScaleBuffer_ ) VULKAN_HPP_NOEXCEPT
{
displacementBiasAndScaleBuffer = displacementBiasAndScaleBuffer_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & setDisplacementBiasAndScaleStride( VULKAN_HPP_NAMESPACE::DeviceSize displacementBiasAndScaleStride_ ) VULKAN_HPP_NOEXCEPT
{
displacementBiasAndScaleStride = displacementBiasAndScaleStride_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & setDisplacementVectorBuffer( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & displacementVectorBuffer_ ) VULKAN_HPP_NOEXCEPT
{
displacementVectorBuffer = displacementVectorBuffer_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & setDisplacementVectorStride( VULKAN_HPP_NAMESPACE::DeviceSize displacementVectorStride_ ) VULKAN_HPP_NOEXCEPT
{
displacementVectorStride = displacementVectorStride_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & setDisplacedMicromapPrimitiveFlags( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & displacedMicromapPrimitiveFlags_ ) VULKAN_HPP_NOEXCEPT
{
displacedMicromapPrimitiveFlags = displacedMicromapPrimitiveFlags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & setDisplacedMicromapPrimitiveFlagsStride( VULKAN_HPP_NAMESPACE::DeviceSize displacedMicromapPrimitiveFlagsStride_ ) VULKAN_HPP_NOEXCEPT
{
displacedMicromapPrimitiveFlagsStride = displacedMicromapPrimitiveFlagsStride_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT
{
indexType = indexType_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & setIndexBuffer( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & indexBuffer_ ) VULKAN_HPP_NOEXCEPT
{
indexBuffer = indexBuffer_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & setIndexStride( VULKAN_HPP_NAMESPACE::DeviceSize indexStride_ ) VULKAN_HPP_NOEXCEPT
{
indexStride = indexStride_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & setBaseTriangle( uint32_t baseTriangle_ ) VULKAN_HPP_NOEXCEPT
{
baseTriangle = baseTriangle_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & setUsageCountsCount( uint32_t usageCountsCount_ ) VULKAN_HPP_NOEXCEPT
{
usageCountsCount = usageCountsCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & setPUsageCounts( const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * pUsageCounts_ ) VULKAN_HPP_NOEXCEPT
{
pUsageCounts = pUsageCounts_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
AccelerationStructureTrianglesDisplacementMicromapNV & setUsageCounts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MicromapUsageEXT> const & usageCounts_ ) VULKAN_HPP_NOEXCEPT
{
usageCountsCount = static_cast<uint32_t>( usageCounts_.size() );
pUsageCounts = usageCounts_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & setPpUsageCounts( const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * ppUsageCounts_ ) VULKAN_HPP_NOEXCEPT
{
ppUsageCounts = ppUsageCounts_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
AccelerationStructureTrianglesDisplacementMicromapNV & setPUsageCounts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const> const & pUsageCounts_ ) VULKAN_HPP_NOEXCEPT
{
usageCountsCount = static_cast<uint32_t>( pUsageCounts_.size() );
ppUsageCounts = pUsageCounts_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & setMicromap( VULKAN_HPP_NAMESPACE::MicromapEXT micromap_ ) VULKAN_HPP_NOEXCEPT
{
micromap = micromap_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkAccelerationStructureTrianglesDisplacementMicromapNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAccelerationStructureTrianglesDisplacementMicromapNV*>( this );
}
operator VkAccelerationStructureTrianglesDisplacementMicromapNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAccelerationStructureTrianglesDisplacementMicromapNV*>( this );
}
operator VkAccelerationStructureTrianglesDisplacementMicromapNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkAccelerationStructureTrianglesDisplacementMicromapNV*>( this );
}
operator VkAccelerationStructureTrianglesDisplacementMicromapNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkAccelerationStructureTrianglesDisplacementMicromapNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::IndexType const &, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, uint32_t const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const &, const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * const &, VULKAN_HPP_NAMESPACE::MicromapEXT const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, displacementBiasAndScaleFormat, displacementVectorFormat, displacementBiasAndScaleBuffer, displacementBiasAndScaleStride, displacementVectorBuffer, displacementVectorStride, displacedMicromapPrimitiveFlags, displacedMicromapPrimitiveFlagsStride, indexType, indexBuffer, indexStride, baseTriangle, usageCountsCount, pUsageCounts, ppUsageCounts, micromap );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureTrianglesDisplacementMicromapNV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Format displacementBiasAndScaleFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
VULKAN_HPP_NAMESPACE::Format displacementVectorFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR displacementBiasAndScaleBuffer = {};
VULKAN_HPP_NAMESPACE::DeviceSize displacementBiasAndScaleStride = {};
VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR displacementVectorBuffer = {};
VULKAN_HPP_NAMESPACE::DeviceSize displacementVectorStride = {};
VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR displacedMicromapPrimitiveFlags = {};
VULKAN_HPP_NAMESPACE::DeviceSize displacedMicromapPrimitiveFlagsStride = {};
VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16;
VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexBuffer = {};
VULKAN_HPP_NAMESPACE::DeviceSize indexStride = {};
uint32_t baseTriangle = {};
uint32_t usageCountsCount = {};
const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * pUsageCounts = {};
const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * ppUsageCounts = {};
VULKAN_HPP_NAMESPACE::MicromapEXT micromap = {};
};
template <>
struct CppType<StructureType, StructureType::eAccelerationStructureTrianglesDisplacementMicromapNV>
{
using Type = AccelerationStructureTrianglesDisplacementMicromapNV;
};
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
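  // Editorial note (illustrative comment): when VK_ENABLE_BETA_EXTENSIONS is defined, the
  // displacement-micromap struct above is chained into the pNext of
  // AccelerationStructureGeometryTrianglesDataKHR, in the same way as the opacity-micromap
  // struct that follows:
  //
  //   triangles.setPNext( &displacementMicromapInfo );  // hypothetical variables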
// wrapper struct for struct VkAccelerationStructureTrianglesOpacityMicromapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureTrianglesOpacityMicromapEXT.html
struct AccelerationStructureTrianglesOpacityMicromapEXT
{
using NativeType = VkAccelerationStructureTrianglesOpacityMicromapEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureTrianglesOpacityMicromapEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT(VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize indexStride_ = {}, uint32_t baseTriangle_ = {}, uint32_t usageCountsCount_ = {}, const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * pUsageCounts_ = {}, const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * ppUsageCounts_ = {}, VULKAN_HPP_NAMESPACE::MicromapEXT micromap_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, indexType{ indexType_ }, indexBuffer{ indexBuffer_ }, indexStride{ indexStride_ }, baseTriangle{ baseTriangle_ }, usageCountsCount{ usageCountsCount_ }, pUsageCounts{ pUsageCounts_ }, ppUsageCounts{ ppUsageCounts_ }, micromap{ micromap_ }
{}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT( AccelerationStructureTrianglesOpacityMicromapEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AccelerationStructureTrianglesOpacityMicromapEXT( VkAccelerationStructureTrianglesOpacityMicromapEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: AccelerationStructureTrianglesOpacityMicromapEXT( *reinterpret_cast<AccelerationStructureTrianglesOpacityMicromapEXT const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
AccelerationStructureTrianglesOpacityMicromapEXT( VULKAN_HPP_NAMESPACE::IndexType indexType_, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexBuffer_, VULKAN_HPP_NAMESPACE::DeviceSize indexStride_, uint32_t baseTriangle_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MicromapUsageEXT> const & usageCounts_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const> const & pUsageCounts_ = {}, VULKAN_HPP_NAMESPACE::MicromapEXT micromap_ = {}, void * pNext_ = nullptr )
: pNext( pNext_ ), indexType( indexType_ ), indexBuffer( indexBuffer_ ), indexStride( indexStride_ ), baseTriangle( baseTriangle_ ), usageCountsCount( static_cast<uint32_t>( !usageCounts_.empty() ? usageCounts_.size() : pUsageCounts_.size() ) ), pUsageCounts( usageCounts_.data() ), ppUsageCounts( pUsageCounts_.data() ), micromap( micromap_ )
{
#ifdef VULKAN_HPP_NO_EXCEPTIONS
VULKAN_HPP_ASSERT( ( !usageCounts_.empty() + !pUsageCounts_.empty() ) <= 1 );
#else
if ( 1 < ( !usageCounts_.empty() + !pUsageCounts_.empty() ) )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING"::AccelerationStructureTrianglesOpacityMicromapEXT::AccelerationStructureTrianglesOpacityMicromapEXT: 1 < ( !usageCounts_.empty() + !pUsageCounts_.empty() )" );
}
#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
AccelerationStructureTrianglesOpacityMicromapEXT & operator=( AccelerationStructureTrianglesOpacityMicromapEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
AccelerationStructureTrianglesOpacityMicromapEXT & operator=( VkAccelerationStructureTrianglesOpacityMicromapEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureTrianglesOpacityMicromapEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT
{
indexType = indexType_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & setIndexBuffer( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & indexBuffer_ ) VULKAN_HPP_NOEXCEPT
{
indexBuffer = indexBuffer_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & setIndexStride( VULKAN_HPP_NAMESPACE::DeviceSize indexStride_ ) VULKAN_HPP_NOEXCEPT
{
indexStride = indexStride_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & setBaseTriangle( uint32_t baseTriangle_ ) VULKAN_HPP_NOEXCEPT
{
baseTriangle = baseTriangle_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & setUsageCountsCount( uint32_t usageCountsCount_ ) VULKAN_HPP_NOEXCEPT
{
usageCountsCount = usageCountsCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & setPUsageCounts( const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * pUsageCounts_ ) VULKAN_HPP_NOEXCEPT
{
pUsageCounts = pUsageCounts_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
AccelerationStructureTrianglesOpacityMicromapEXT & setUsageCounts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MicromapUsageEXT> const & usageCounts_ ) VULKAN_HPP_NOEXCEPT
{
usageCountsCount = static_cast<uint32_t>( usageCounts_.size() );
pUsageCounts = usageCounts_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & setPpUsageCounts( const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * ppUsageCounts_ ) VULKAN_HPP_NOEXCEPT
{
ppUsageCounts = ppUsageCounts_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
AccelerationStructureTrianglesOpacityMicromapEXT & setPUsageCounts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const> const & pUsageCounts_ ) VULKAN_HPP_NOEXCEPT
{
usageCountsCount = static_cast<uint32_t>( pUsageCounts_.size() );
ppUsageCounts = pUsageCounts_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & setMicromap( VULKAN_HPP_NAMESPACE::MicromapEXT micromap_ ) VULKAN_HPP_NOEXCEPT
{
micromap = micromap_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkAccelerationStructureTrianglesOpacityMicromapEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAccelerationStructureTrianglesOpacityMicromapEXT*>( this );
}
operator VkAccelerationStructureTrianglesOpacityMicromapEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAccelerationStructureTrianglesOpacityMicromapEXT*>( this );
}
operator VkAccelerationStructureTrianglesOpacityMicromapEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkAccelerationStructureTrianglesOpacityMicromapEXT*>( this );
}
operator VkAccelerationStructureTrianglesOpacityMicromapEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkAccelerationStructureTrianglesOpacityMicromapEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::IndexType const &, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, uint32_t const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const &, const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * const &, VULKAN_HPP_NAMESPACE::MicromapEXT const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, indexType, indexBuffer, indexStride, baseTriangle, usageCountsCount, pUsageCounts, ppUsageCounts, micromap );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureTrianglesOpacityMicromapEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16;
VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexBuffer = {};
VULKAN_HPP_NAMESPACE::DeviceSize indexStride = {};
uint32_t baseTriangle = {};
uint32_t usageCountsCount = {};
const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * pUsageCounts = {};
const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * ppUsageCounts = {};
VULKAN_HPP_NAMESPACE::MicromapEXT micromap = {};
};
template <>
struct CppType<StructureType, StructureType::eAccelerationStructureTrianglesOpacityMicromapEXT>
{
using Type = AccelerationStructureTrianglesOpacityMicromapEXT;
};
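// A minimal usage sketch, assuming `triangleCount`, `indexBufferAddress` (a vk::DeviceAddress)
// and `micromap` (a built vk::MicromapEXT) exist elsewhere. The enhanced-mode constructor
// above accepts either the flat `usageCounts_` proxy or the pointer-array `pUsageCounts_`
// proxy, but not both, as the assert/throw enforces:
//
//   std::vector<vk::MicromapUsageEXT> usages = {
//     vk::MicromapUsageEXT{ triangleCount, 0, static_cast<uint32_t>( vk::OpacityMicromapFormatEXT::e2State ) } };
//   auto trianglesMicromap = vk::AccelerationStructureTrianglesOpacityMicromapEXT{}
//                              .setIndexType( vk::IndexType::eUint32 )
//                              .setIndexBuffer( indexBufferAddress )
//                              .setIndexStride( sizeof( uint32_t ) )
//                              .setUsageCounts( usages )   // fills usageCountsCount and pUsageCounts together
//                              .setMicromap( micromap );
//   // Chain the struct into AccelerationStructureGeometryTrianglesDataKHR::pNext when building.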
// wrapper struct for struct VkAccelerationStructureVersionInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureVersionInfoKHR.html
struct AccelerationStructureVersionInfoKHR
{
using NativeType = VkAccelerationStructureVersionInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureVersionInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR AccelerationStructureVersionInfoKHR(const uint8_t * pVersionData_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pVersionData{ pVersionData_ }
{}
VULKAN_HPP_CONSTEXPR AccelerationStructureVersionInfoKHR( AccelerationStructureVersionInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AccelerationStructureVersionInfoKHR( VkAccelerationStructureVersionInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: AccelerationStructureVersionInfoKHR( *reinterpret_cast<AccelerationStructureVersionInfoKHR const *>( &rhs ) )
{}
AccelerationStructureVersionInfoKHR & operator=( AccelerationStructureVersionInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
AccelerationStructureVersionInfoKHR & operator=( VkAccelerationStructureVersionInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureVersionInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureVersionInfoKHR & setPVersionData( const uint8_t * pVersionData_ ) VULKAN_HPP_NOEXCEPT
{
pVersionData = pVersionData_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkAccelerationStructureVersionInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAccelerationStructureVersionInfoKHR*>( this );
}
operator VkAccelerationStructureVersionInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAccelerationStructureVersionInfoKHR*>( this );
}
operator VkAccelerationStructureVersionInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkAccelerationStructureVersionInfoKHR*>( this );
}
operator VkAccelerationStructureVersionInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkAccelerationStructureVersionInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const uint8_t * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pVersionData );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( AccelerationStructureVersionInfoKHR const & ) const = default;
#else
bool operator==( AccelerationStructureVersionInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pVersionData == rhs.pVersionData );
#endif
}
bool operator!=( AccelerationStructureVersionInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureVersionInfoKHR;
const void * pNext = {};
const uint8_t * pVersionData = {};
};
template <>
struct CppType<StructureType, StructureType::eAccelerationStructureVersionInfoKHR>
{
using Type = AccelerationStructureVersionInfoKHR;
};
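// A minimal usage sketch, assuming `device` is a valid vk::Device and `versionBytes` holds
// 2 * VK_UUID_SIZE bytes of serialized acceleration-structure version data (the size the
// Vulkan specification requires pVersionData to point at):
//
//   vk::AccelerationStructureVersionInfoKHR versionInfo{ versionBytes.data() };
//   vk::AccelerationStructureCompatibilityKHR compatibility =
//     device.getAccelerationStructureCompatibilityKHR( versionInfo );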
// wrapper struct for struct VkAcquireNextImageInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAcquireNextImageInfoKHR.html
struct AcquireNextImageInfoKHR
{
using NativeType = VkAcquireNextImageInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAcquireNextImageInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR AcquireNextImageInfoKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {}, uint64_t timeout_ = {}, VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, VULKAN_HPP_NAMESPACE::Fence fence_ = {}, uint32_t deviceMask_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, swapchain{ swapchain_ }, timeout{ timeout_ }, semaphore{ semaphore_ }, fence{ fence_ }, deviceMask{ deviceMask_ }
{}
VULKAN_HPP_CONSTEXPR AcquireNextImageInfoKHR( AcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AcquireNextImageInfoKHR( VkAcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: AcquireNextImageInfoKHR( *reinterpret_cast<AcquireNextImageInfoKHR const *>( &rhs ) )
{}
AcquireNextImageInfoKHR & operator=( AcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
AcquireNextImageInfoKHR & operator=( VkAcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR & setSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT
{
swapchain = swapchain_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR & setTimeout( uint64_t timeout_ ) VULKAN_HPP_NOEXCEPT
{
timeout = timeout_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
{
semaphore = semaphore_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
{
fence = fence_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
{
deviceMask = deviceMask_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkAcquireNextImageInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAcquireNextImageInfoKHR*>( this );
}
operator VkAcquireNextImageInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAcquireNextImageInfoKHR*>( this );
}
operator VkAcquireNextImageInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkAcquireNextImageInfoKHR*>( this );
}
operator VkAcquireNextImageInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkAcquireNextImageInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SwapchainKHR const &, uint64_t const &, VULKAN_HPP_NAMESPACE::Semaphore const &, VULKAN_HPP_NAMESPACE::Fence const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, swapchain, timeout, semaphore, fence, deviceMask );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( AcquireNextImageInfoKHR const & ) const = default;
#else
bool operator==( AcquireNextImageInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( swapchain == rhs.swapchain )
&& ( timeout == rhs.timeout )
&& ( semaphore == rhs.semaphore )
&& ( fence == rhs.fence )
&& ( deviceMask == rhs.deviceMask );
#endif
}
bool operator!=( AcquireNextImageInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAcquireNextImageInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {};
uint64_t timeout = {};
VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
VULKAN_HPP_NAMESPACE::Fence fence = {};
uint32_t deviceMask = {};
};
template <>
struct CppType<StructureType, StructureType::eAcquireNextImageInfoKHR>
{
using Type = AcquireNextImageInfoKHR;
};
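// A minimal usage sketch, assuming `device`, `swapchain`, and `imageAvailable` (a
// vk::Semaphore to be signaled) are valid handles; vkAcquireNextImage2KHR adds the
// deviceMask on top of what vkAcquireNextImageKHR offers:
//
//   auto acquireInfo = vk::AcquireNextImageInfoKHR{}
//                        .setSwapchain( swapchain )
//                        .setTimeout( UINT64_MAX )
//                        .setSemaphore( imageAvailable )
//                        .setDeviceMask( 1 );   // single-GPU case: device index 0 only
//   uint32_t imageIndex = device.acquireNextImage2KHR( acquireInfo ).value;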
// wrapper struct for struct VkAcquireProfilingLockInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAcquireProfilingLockInfoKHR.html
struct AcquireProfilingLockInfoKHR
{
using NativeType = VkAcquireProfilingLockInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAcquireProfilingLockInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR AcquireProfilingLockInfoKHR(VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags_ = {}, uint64_t timeout_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, timeout{ timeout_ }
{}
VULKAN_HPP_CONSTEXPR AcquireProfilingLockInfoKHR( AcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AcquireProfilingLockInfoKHR( VkAcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: AcquireProfilingLockInfoKHR( *reinterpret_cast<AcquireProfilingLockInfoKHR const *>( &rhs ) )
{}
AcquireProfilingLockInfoKHR & operator=( AcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
AcquireProfilingLockInfoKHR & operator=( VkAcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 AcquireProfilingLockInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AcquireProfilingLockInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AcquireProfilingLockInfoKHR & setTimeout( uint64_t timeout_ ) VULKAN_HPP_NOEXCEPT
{
timeout = timeout_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkAcquireProfilingLockInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAcquireProfilingLockInfoKHR*>( this );
}
operator VkAcquireProfilingLockInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAcquireProfilingLockInfoKHR*>( this );
}
operator VkAcquireProfilingLockInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkAcquireProfilingLockInfoKHR*>( this );
}
operator VkAcquireProfilingLockInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkAcquireProfilingLockInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR const &, uint64_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, timeout );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( AcquireProfilingLockInfoKHR const & ) const = default;
#else
bool operator==( AcquireProfilingLockInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( timeout == rhs.timeout );
#endif
}
bool operator!=( AcquireProfilingLockInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAcquireProfilingLockInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags = {};
uint64_t timeout = {};
};
template <>
struct CppType<StructureType, StructureType::eAcquireProfilingLockInfoKHR>
{
using Type = AcquireProfilingLockInfoKHR;
};
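// A minimal usage sketch, assuming `device` is a valid vk::Device with
// VK_KHR_performance_query enabled; the profiling lock must be held before beginning
// command buffers that use performance queries:
//
//   auto lockInfo = vk::AcquireProfilingLockInfoKHR{}.setTimeout( UINT64_MAX );
//   device.acquireProfilingLockKHR( lockInfo );   // blocks for up to `timeout` nanoseconds
//   // ... record and submit command buffers containing performance queries ...
//   device.releaseProfilingLockKHR();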
typedef void * (VKAPI_PTR *PFN_AllocationFunction)( void * pUserData, size_t size, size_t alignment, VULKAN_HPP_NAMESPACE::SystemAllocationScope allocationScope );
typedef void * (VKAPI_PTR *PFN_ReallocationFunction)( void * pUserData, void * pOriginal, size_t size, size_t alignment, VULKAN_HPP_NAMESPACE::SystemAllocationScope allocationScope );
typedef void (VKAPI_PTR *PFN_FreeFunction)( void * pUserData, void * pMemory );
typedef void (VKAPI_PTR *PFN_InternalAllocationNotification)( void * pUserData, size_t size, VULKAN_HPP_NAMESPACE::InternalAllocationType allocationType, VULKAN_HPP_NAMESPACE::SystemAllocationScope allocationScope );
typedef void (VKAPI_PTR *PFN_InternalFreeNotification)( void * pUserData, size_t size, VULKAN_HPP_NAMESPACE::InternalAllocationType allocationType, VULKAN_HPP_NAMESPACE::SystemAllocationScope allocationScope );
// wrapper struct for struct VkAllocationCallbacks, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAllocationCallbacks.html
struct AllocationCallbacks
{
using NativeType = VkAllocationCallbacks;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR AllocationCallbacks(void * pUserData_ = {}, VULKAN_HPP_NAMESPACE::PFN_AllocationFunction pfnAllocation_ = {}, VULKAN_HPP_NAMESPACE::PFN_ReallocationFunction pfnReallocation_ = {}, VULKAN_HPP_NAMESPACE::PFN_FreeFunction pfnFree_ = {}, VULKAN_HPP_NAMESPACE::PFN_InternalAllocationNotification pfnInternalAllocation_ = {}, VULKAN_HPP_NAMESPACE::PFN_InternalFreeNotification pfnInternalFree_ = {}) VULKAN_HPP_NOEXCEPT
: pUserData{ pUserData_ }, pfnAllocation{ pfnAllocation_ }, pfnReallocation{ pfnReallocation_ }, pfnFree{ pfnFree_ }, pfnInternalAllocation{ pfnInternalAllocation_ }, pfnInternalFree{ pfnInternalFree_ }
{}
VULKAN_HPP_CONSTEXPR AllocationCallbacks( AllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AllocationCallbacks( VkAllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT
: AllocationCallbacks( *reinterpret_cast<AllocationCallbacks const *>( &rhs ) )
{}
#if defined( __clang__ ) || defined( __GNUC__ )
# pragma GCC diagnostic push
# if defined( __clang__ )
# pragma clang diagnostic ignored "-Wunknown-warning-option"
# endif
# pragma GCC diagnostic ignored "-Wcast-function-type"
#endif
VULKAN_HPP_DEPRECATED( "This constructor is deprecated. Use the one taking function pointer types from the vk-namespace instead." )
AllocationCallbacks( void * pUserData_,
PFN_vkAllocationFunction pfnAllocation_,
PFN_vkReallocationFunction pfnReallocation_ = {},
PFN_vkFreeFunction pfnFree_ = {},
PFN_vkInternalAllocationNotification pfnInternalAllocation_ = {},
PFN_vkInternalFreeNotification pfnInternalFree_ = {} ) VULKAN_HPP_NOEXCEPT
: AllocationCallbacks( pUserData_,
reinterpret_cast<VULKAN_HPP_NAMESPACE::PFN_AllocationFunction>( pfnAllocation_ ),
reinterpret_cast<VULKAN_HPP_NAMESPACE::PFN_ReallocationFunction>( pfnReallocation_ ),
reinterpret_cast<VULKAN_HPP_NAMESPACE::PFN_FreeFunction>( pfnFree_ ),
reinterpret_cast<VULKAN_HPP_NAMESPACE::PFN_InternalAllocationNotification>( pfnInternalAllocation_ ),
reinterpret_cast<VULKAN_HPP_NAMESPACE::PFN_InternalFreeNotification>( pfnInternalFree_ ) )
{
}
#if defined( __clang__ ) || defined( __GNUC__ )
# pragma GCC diagnostic pop
#endif
AllocationCallbacks & operator=( AllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
AllocationCallbacks & operator=( VkAllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AllocationCallbacks const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT
{
pUserData = pUserData_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPfnAllocation( VULKAN_HPP_NAMESPACE::PFN_AllocationFunction pfnAllocation_ ) VULKAN_HPP_NOEXCEPT
{
pfnAllocation = pfnAllocation_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPfnReallocation( VULKAN_HPP_NAMESPACE::PFN_ReallocationFunction pfnReallocation_ ) VULKAN_HPP_NOEXCEPT
{
pfnReallocation = pfnReallocation_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPfnFree( VULKAN_HPP_NAMESPACE::PFN_FreeFunction pfnFree_ ) VULKAN_HPP_NOEXCEPT
{
pfnFree = pfnFree_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPfnInternalAllocation( VULKAN_HPP_NAMESPACE::PFN_InternalAllocationNotification pfnInternalAllocation_ ) VULKAN_HPP_NOEXCEPT
{
pfnInternalAllocation = pfnInternalAllocation_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPfnInternalFree( VULKAN_HPP_NAMESPACE::PFN_InternalFreeNotification pfnInternalFree_ ) VULKAN_HPP_NOEXCEPT
{
pfnInternalFree = pfnInternalFree_;
return *this;
}
#if defined( __clang__ ) || defined( __GNUC__ )
# pragma GCC diagnostic push
# if defined( __clang__ )
# pragma clang diagnostic ignored "-Wunknown-warning-option"
# endif
# pragma GCC diagnostic ignored "-Wcast-function-type"
#endif
VULKAN_HPP_DEPRECATED( "This setter is deprecated. Use the one taking a function pointer type from the vk-namespace instead." )
AllocationCallbacks & setPfnAllocation( PFN_vkAllocationFunction pfnAllocation_ ) VULKAN_HPP_NOEXCEPT
{
return setPfnAllocation( reinterpret_cast<VULKAN_HPP_NAMESPACE::PFN_AllocationFunction>( pfnAllocation_ ) );
}
VULKAN_HPP_DEPRECATED( "This setter is deprecated. Use the one taking a function pointer type from the vk-namespace instead." )
AllocationCallbacks & setPfnReallocation( PFN_vkReallocationFunction pfnReallocation_ ) VULKAN_HPP_NOEXCEPT
{
return setPfnReallocation( reinterpret_cast<VULKAN_HPP_NAMESPACE::PFN_ReallocationFunction>( pfnReallocation_ ) );
}
VULKAN_HPP_DEPRECATED( "This setter is deprecated. Use the one taking a function pointer type from the vk-namespace instead." )
AllocationCallbacks & setPfnInternalAllocation( PFN_vkInternalAllocationNotification pfnInternalAllocation_ ) VULKAN_HPP_NOEXCEPT
{
return setPfnInternalAllocation( reinterpret_cast<VULKAN_HPP_NAMESPACE::PFN_InternalAllocationNotification>( pfnInternalAllocation_ ) );
}
VULKAN_HPP_DEPRECATED( "This setter is deprecated. Use the one taking a function pointer type from the vk-namespace instead." )
AllocationCallbacks & setPfnInternalFree( PFN_vkInternalFreeNotification pfnInternalFree_ ) VULKAN_HPP_NOEXCEPT
{
return setPfnInternalFree( reinterpret_cast<VULKAN_HPP_NAMESPACE::PFN_InternalFreeNotification>( pfnInternalFree_ ) );
}
#if defined( __clang__ ) || defined( __GNUC__ )
# pragma GCC diagnostic pop
#endif
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkAllocationCallbacks const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAllocationCallbacks*>( this );
}
operator VkAllocationCallbacks &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAllocationCallbacks*>( this );
}
operator VkAllocationCallbacks const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkAllocationCallbacks*>( this );
}
operator VkAllocationCallbacks *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkAllocationCallbacks*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<void * const &, VULKAN_HPP_NAMESPACE::PFN_AllocationFunction const &, VULKAN_HPP_NAMESPACE::PFN_ReallocationFunction const &, VULKAN_HPP_NAMESPACE::PFN_FreeFunction const &, VULKAN_HPP_NAMESPACE::PFN_InternalAllocationNotification const &, VULKAN_HPP_NAMESPACE::PFN_InternalFreeNotification const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( pUserData, pfnAllocation, pfnReallocation, pfnFree, pfnInternalAllocation, pfnInternalFree );
}
#endif
bool operator==( AllocationCallbacks const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( pUserData == rhs.pUserData )
&& ( pfnAllocation == rhs.pfnAllocation )
&& ( pfnReallocation == rhs.pfnReallocation )
&& ( pfnFree == rhs.pfnFree )
&& ( pfnInternalAllocation == rhs.pfnInternalAllocation )
&& ( pfnInternalFree == rhs.pfnInternalFree );
#endif
}
bool operator!=( AllocationCallbacks const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
void * pUserData = {};
VULKAN_HPP_NAMESPACE::PFN_AllocationFunction pfnAllocation = {};
VULKAN_HPP_NAMESPACE::PFN_ReallocationFunction pfnReallocation = {};
VULKAN_HPP_NAMESPACE::PFN_FreeFunction pfnFree = {};
VULKAN_HPP_NAMESPACE::PFN_InternalAllocationNotification pfnInternalAllocation = {};
VULKAN_HPP_NAMESPACE::PFN_InternalFreeNotification pfnInternalFree = {};
};
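// A minimal allocator sketch, assuming a C++17 runtime with std::aligned_alloc (MSVC would
// need _aligned_malloc/_aligned_free instead); the hypothetical `myRealloc` must be supplied
// as well, since pfnAllocation, pfnReallocation, and pfnFree are all required together:
//
//   VKAPI_ATTR void * VKAPI_CALL myAlloc( void * /*pUserData*/, size_t size, size_t alignment,
//                                         vk::SystemAllocationScope /*scope*/ )
//   {
//     // round the size up so it is a multiple of the alignment, as aligned_alloc requires
//     return std::aligned_alloc( alignment, ( size + alignment - 1 ) / alignment * alignment );
//   }
//   VKAPI_ATTR void VKAPI_CALL myFree( void * /*pUserData*/, void * pMemory )
//   {
//     std::free( pMemory );
//   }
//   vk::AllocationCallbacks callbacks{ /*pUserData*/ nullptr, &myAlloc, &myRealloc, &myFree };
//   vk::Instance instance = vk::createInstance( createInfo, callbacks );   // `createInfo` assumed filled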
// wrapper struct for struct VkAmigoProfilingSubmitInfoSEC, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAmigoProfilingSubmitInfoSEC.html
struct AmigoProfilingSubmitInfoSEC
{
using NativeType = VkAmigoProfilingSubmitInfoSEC;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAmigoProfilingSubmitInfoSEC;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR AmigoProfilingSubmitInfoSEC(uint64_t firstDrawTimestamp_ = {}, uint64_t swapBufferTimestamp_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, firstDrawTimestamp{ firstDrawTimestamp_ }, swapBufferTimestamp{ swapBufferTimestamp_ }
{}
VULKAN_HPP_CONSTEXPR AmigoProfilingSubmitInfoSEC( AmigoProfilingSubmitInfoSEC const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AmigoProfilingSubmitInfoSEC( VkAmigoProfilingSubmitInfoSEC const & rhs ) VULKAN_HPP_NOEXCEPT
: AmigoProfilingSubmitInfoSEC( *reinterpret_cast<AmigoProfilingSubmitInfoSEC const *>( &rhs ) )
{}
AmigoProfilingSubmitInfoSEC & operator=( AmigoProfilingSubmitInfoSEC const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
AmigoProfilingSubmitInfoSEC & operator=( VkAmigoProfilingSubmitInfoSEC const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AmigoProfilingSubmitInfoSEC const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 AmigoProfilingSubmitInfoSEC & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AmigoProfilingSubmitInfoSEC & setFirstDrawTimestamp( uint64_t firstDrawTimestamp_ ) VULKAN_HPP_NOEXCEPT
{
firstDrawTimestamp = firstDrawTimestamp_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AmigoProfilingSubmitInfoSEC & setSwapBufferTimestamp( uint64_t swapBufferTimestamp_ ) VULKAN_HPP_NOEXCEPT
{
swapBufferTimestamp = swapBufferTimestamp_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkAmigoProfilingSubmitInfoSEC const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAmigoProfilingSubmitInfoSEC*>( this );
}
operator VkAmigoProfilingSubmitInfoSEC &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAmigoProfilingSubmitInfoSEC*>( this );
}
operator VkAmigoProfilingSubmitInfoSEC const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkAmigoProfilingSubmitInfoSEC*>( this );
}
operator VkAmigoProfilingSubmitInfoSEC *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkAmigoProfilingSubmitInfoSEC*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint64_t const &, uint64_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, firstDrawTimestamp, swapBufferTimestamp );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( AmigoProfilingSubmitInfoSEC const & ) const = default;
#else
bool operator==( AmigoProfilingSubmitInfoSEC const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( firstDrawTimestamp == rhs.firstDrawTimestamp )
&& ( swapBufferTimestamp == rhs.swapBufferTimestamp );
#endif
}
bool operator!=( AmigoProfilingSubmitInfoSEC const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAmigoProfilingSubmitInfoSEC;
const void * pNext = {};
uint64_t firstDrawTimestamp = {};
uint64_t swapBufferTimestamp = {};
};
template <>
struct CppType<StructureType, StructureType::eAmigoProfilingSubmitInfoSEC>
{
using Type = AmigoProfilingSubmitInfoSEC;
};
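// A minimal usage sketch, assuming VK_SEC_amigo_profiling is enabled, `cmdBuffer` is a
// recorded vk::CommandBuffer, `queue` is a vk::Queue, and t0/t1 come from the application's
// frame timing; the struct extends VkSubmitInfo, so it travels in the pNext chain:
//
//   vk::StructureChain<vk::SubmitInfo, vk::AmigoProfilingSubmitInfoSEC> chain{
//     vk::SubmitInfo{}.setCommandBuffers( cmdBuffer ),
//     vk::AmigoProfilingSubmitInfoSEC{}.setFirstDrawTimestamp( t0 ).setSwapBufferTimestamp( t1 ) };
//   queue.submit( chain.get<vk::SubmitInfo>() );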
// wrapper struct for struct VkComponentMapping, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkComponentMapping.html
struct ComponentMapping
{
using NativeType = VkComponentMapping;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ComponentMapping(VULKAN_HPP_NAMESPACE::ComponentSwizzle r_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity, VULKAN_HPP_NAMESPACE::ComponentSwizzle g_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity, VULKAN_HPP_NAMESPACE::ComponentSwizzle b_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity, VULKAN_HPP_NAMESPACE::ComponentSwizzle a_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity) VULKAN_HPP_NOEXCEPT
: r{ r_ }, g{ g_ }, b{ b_ }, a{ a_ }
{}
VULKAN_HPP_CONSTEXPR ComponentMapping( ComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ComponentMapping( VkComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT
: ComponentMapping( *reinterpret_cast<ComponentMapping const *>( &rhs ) )
{}
ComponentMapping & operator=( ComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ComponentMapping & operator=( VkComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ComponentMapping const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ComponentMapping & setR( VULKAN_HPP_NAMESPACE::ComponentSwizzle r_ ) VULKAN_HPP_NOEXCEPT
{
r = r_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ComponentMapping & setG( VULKAN_HPP_NAMESPACE::ComponentSwizzle g_ ) VULKAN_HPP_NOEXCEPT
{
g = g_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ComponentMapping & setB( VULKAN_HPP_NAMESPACE::ComponentSwizzle b_ ) VULKAN_HPP_NOEXCEPT
{
b = b_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ComponentMapping & setA( VULKAN_HPP_NAMESPACE::ComponentSwizzle a_ ) VULKAN_HPP_NOEXCEPT
{
a = a_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkComponentMapping const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkComponentMapping*>( this );
}
operator VkComponentMapping &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkComponentMapping*>( this );
}
operator VkComponentMapping const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkComponentMapping*>( this );
}
operator VkComponentMapping *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkComponentMapping*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::ComponentSwizzle const &, VULKAN_HPP_NAMESPACE::ComponentSwizzle const &, VULKAN_HPP_NAMESPACE::ComponentSwizzle const &, VULKAN_HPP_NAMESPACE::ComponentSwizzle const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( r, g, b, a );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ComponentMapping const & ) const = default;
#else
bool operator==( ComponentMapping const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( r == rhs.r )
&& ( g == rhs.g )
&& ( b == rhs.b )
&& ( a == rhs.a );
#endif
}
bool operator!=( ComponentMapping const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::ComponentSwizzle r = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity;
VULKAN_HPP_NAMESPACE::ComponentSwizzle g = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity;
VULKAN_HPP_NAMESPACE::ComponentSwizzle b = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity;
VULKAN_HPP_NAMESPACE::ComponentSwizzle a = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity;
};
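// A minimal usage sketch: the mapping is most often consumed as
// ImageViewCreateInfo::components, e.g. to view BGRA data through an RGBA format by
// swapping the red and blue channels:
//
//   vk::ComponentMapping swapRB{ vk::ComponentSwizzle::eB, vk::ComponentSwizzle::eG,
//                                vk::ComponentSwizzle::eR, vk::ComponentSwizzle::eA };
//   auto viewInfo = vk::ImageViewCreateInfo{}.setComponents( swapRB );   // remaining fields set elsewhere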
#if defined( VK_USE_PLATFORM_ANDROID_KHR )
// wrapper struct for struct VkAndroidHardwareBufferFormatProperties2ANDROID, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAndroidHardwareBufferFormatProperties2ANDROID.html
struct AndroidHardwareBufferFormatProperties2ANDROID
{
using NativeType = VkAndroidHardwareBufferFormatProperties2ANDROID;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidHardwareBufferFormatProperties2ANDROID;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatProperties2ANDROID(VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, uint64_t externalFormat_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 formatFeatures_ = {}, VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents_ = {}, VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity, VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull, VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, format{ format_ }, externalFormat{ externalFormat_ }, formatFeatures{ formatFeatures_ }, samplerYcbcrConversionComponents{ samplerYcbcrConversionComponents_ }, suggestedYcbcrModel{ suggestedYcbcrModel_ }, suggestedYcbcrRange{ suggestedYcbcrRange_ }, suggestedXChromaOffset{ suggestedXChromaOffset_ }, suggestedYChromaOffset{ suggestedYChromaOffset_ }
{}
VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatProperties2ANDROID( AndroidHardwareBufferFormatProperties2ANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AndroidHardwareBufferFormatProperties2ANDROID( VkAndroidHardwareBufferFormatProperties2ANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
: AndroidHardwareBufferFormatProperties2ANDROID( *reinterpret_cast<AndroidHardwareBufferFormatProperties2ANDROID const *>( &rhs ) )
{}
AndroidHardwareBufferFormatProperties2ANDROID & operator=( AndroidHardwareBufferFormatProperties2ANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
AndroidHardwareBufferFormatProperties2ANDROID & operator=( VkAndroidHardwareBufferFormatProperties2ANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatProperties2ANDROID const *>( &rhs );
return *this;
}
operator VkAndroidHardwareBufferFormatProperties2ANDROID const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAndroidHardwareBufferFormatProperties2ANDROID*>( this );
}
operator VkAndroidHardwareBufferFormatProperties2ANDROID &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAndroidHardwareBufferFormatProperties2ANDROID*>( this );
}
operator VkAndroidHardwareBufferFormatProperties2ANDROID const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkAndroidHardwareBufferFormatProperties2ANDROID*>( this );
}
operator VkAndroidHardwareBufferFormatProperties2ANDROID *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkAndroidHardwareBufferFormatProperties2ANDROID*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Format const &, uint64_t const &, VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 const &, VULKAN_HPP_NAMESPACE::ComponentMapping const &, VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion const &, VULKAN_HPP_NAMESPACE::SamplerYcbcrRange const &, VULKAN_HPP_NAMESPACE::ChromaLocation const &, VULKAN_HPP_NAMESPACE::ChromaLocation const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, format, externalFormat, formatFeatures, samplerYcbcrConversionComponents, suggestedYcbcrModel, suggestedYcbcrRange, suggestedXChromaOffset, suggestedYChromaOffset );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( AndroidHardwareBufferFormatProperties2ANDROID const & ) const = default;
#else
bool operator==( AndroidHardwareBufferFormatProperties2ANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( format == rhs.format )
&& ( externalFormat == rhs.externalFormat )
&& ( formatFeatures == rhs.formatFeatures )
&& ( samplerYcbcrConversionComponents == rhs.samplerYcbcrConversionComponents )
&& ( suggestedYcbcrModel == rhs.suggestedYcbcrModel )
&& ( suggestedYcbcrRange == rhs.suggestedYcbcrRange )
&& ( suggestedXChromaOffset == rhs.suggestedXChromaOffset )
&& ( suggestedYChromaOffset == rhs.suggestedYChromaOffset );
#endif
}
bool operator!=( AndroidHardwareBufferFormatProperties2ANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferFormatProperties2ANDROID;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
uint64_t externalFormat = {};
VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 formatFeatures = {};
VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents = {};
VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity;
VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull;
VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
};
template <>
struct CppType<StructureType, StructureType::eAndroidHardwareBufferFormatProperties2ANDROID>
{
using Type = AndroidHardwareBufferFormatProperties2ANDROID;
};
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
#if defined( VK_USE_PLATFORM_ANDROID_KHR )
// wrapper struct for struct VkAndroidHardwareBufferFormatPropertiesANDROID, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAndroidHardwareBufferFormatPropertiesANDROID.html
struct AndroidHardwareBufferFormatPropertiesANDROID
{
using NativeType = VkAndroidHardwareBufferFormatPropertiesANDROID;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidHardwareBufferFormatPropertiesANDROID;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatPropertiesANDROID(VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, uint64_t externalFormat_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures_ = {}, VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents_ = {}, VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity, VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull, VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, format{ format_ }, externalFormat{ externalFormat_ }, formatFeatures{ formatFeatures_ }, samplerYcbcrConversionComponents{ samplerYcbcrConversionComponents_ }, suggestedYcbcrModel{ suggestedYcbcrModel_ }, suggestedYcbcrRange{ suggestedYcbcrRange_ }, suggestedXChromaOffset{ suggestedXChromaOffset_ }, suggestedYChromaOffset{ suggestedYChromaOffset_ }
{}
VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatPropertiesANDROID( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AndroidHardwareBufferFormatPropertiesANDROID( VkAndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
: AndroidHardwareBufferFormatPropertiesANDROID( *reinterpret_cast<AndroidHardwareBufferFormatPropertiesANDROID const *>( &rhs ) )
{}
AndroidHardwareBufferFormatPropertiesANDROID & operator=( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
AndroidHardwareBufferFormatPropertiesANDROID & operator=( VkAndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID const *>( &rhs );
return *this;
}
operator VkAndroidHardwareBufferFormatPropertiesANDROID const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAndroidHardwareBufferFormatPropertiesANDROID*>( this );
}
operator VkAndroidHardwareBufferFormatPropertiesANDROID &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAndroidHardwareBufferFormatPropertiesANDROID*>( this );
}
operator VkAndroidHardwareBufferFormatPropertiesANDROID const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkAndroidHardwareBufferFormatPropertiesANDROID*>( this );
}
operator VkAndroidHardwareBufferFormatPropertiesANDROID *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkAndroidHardwareBufferFormatPropertiesANDROID*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Format const &, uint64_t const &, VULKAN_HPP_NAMESPACE::FormatFeatureFlags const &, VULKAN_HPP_NAMESPACE::ComponentMapping const &, VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion const &, VULKAN_HPP_NAMESPACE::SamplerYcbcrRange const &, VULKAN_HPP_NAMESPACE::ChromaLocation const &, VULKAN_HPP_NAMESPACE::ChromaLocation const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, format, externalFormat, formatFeatures, samplerYcbcrConversionComponents, suggestedYcbcrModel, suggestedYcbcrRange, suggestedXChromaOffset, suggestedYChromaOffset );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( AndroidHardwareBufferFormatPropertiesANDROID const & ) const = default;
#else
bool operator==( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( format == rhs.format )
&& ( externalFormat == rhs.externalFormat )
&& ( formatFeatures == rhs.formatFeatures )
&& ( samplerYcbcrConversionComponents == rhs.samplerYcbcrConversionComponents )
&& ( suggestedYcbcrModel == rhs.suggestedYcbcrModel )
&& ( suggestedYcbcrRange == rhs.suggestedYcbcrRange )
&& ( suggestedXChromaOffset == rhs.suggestedXChromaOffset )
&& ( suggestedYChromaOffset == rhs.suggestedYChromaOffset );
#endif
}
bool operator!=( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferFormatPropertiesANDROID;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
uint64_t externalFormat = {};
VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures = {};
VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents = {};
VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity;
VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull;
VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
};
template <>
struct CppType<StructureType, StructureType::eAndroidHardwareBufferFormatPropertiesANDROID>
{
using Type = AndroidHardwareBufferFormatPropertiesANDROID;
};
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
#if defined( VK_USE_PLATFORM_ANDROID_KHR )
// wrapper struct for struct VkAndroidHardwareBufferFormatResolvePropertiesANDROID, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAndroidHardwareBufferFormatResolvePropertiesANDROID.html
struct AndroidHardwareBufferFormatResolvePropertiesANDROID
{
using NativeType = VkAndroidHardwareBufferFormatResolvePropertiesANDROID;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidHardwareBufferFormatResolvePropertiesANDROID;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatResolvePropertiesANDROID(VULKAN_HPP_NAMESPACE::Format colorAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, colorAttachmentFormat{ colorAttachmentFormat_ }
{}
VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatResolvePropertiesANDROID( AndroidHardwareBufferFormatResolvePropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AndroidHardwareBufferFormatResolvePropertiesANDROID( VkAndroidHardwareBufferFormatResolvePropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
: AndroidHardwareBufferFormatResolvePropertiesANDROID( *reinterpret_cast<AndroidHardwareBufferFormatResolvePropertiesANDROID const *>( &rhs ) )
{}
AndroidHardwareBufferFormatResolvePropertiesANDROID & operator=( AndroidHardwareBufferFormatResolvePropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
AndroidHardwareBufferFormatResolvePropertiesANDROID & operator=( VkAndroidHardwareBufferFormatResolvePropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatResolvePropertiesANDROID const *>( &rhs );
return *this;
}
operator VkAndroidHardwareBufferFormatResolvePropertiesANDROID const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAndroidHardwareBufferFormatResolvePropertiesANDROID*>( this );
}
operator VkAndroidHardwareBufferFormatResolvePropertiesANDROID &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAndroidHardwareBufferFormatResolvePropertiesANDROID*>( this );
}
operator VkAndroidHardwareBufferFormatResolvePropertiesANDROID const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkAndroidHardwareBufferFormatResolvePropertiesANDROID*>( this );
}
operator VkAndroidHardwareBufferFormatResolvePropertiesANDROID *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkAndroidHardwareBufferFormatResolvePropertiesANDROID*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Format const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, colorAttachmentFormat );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( AndroidHardwareBufferFormatResolvePropertiesANDROID const & ) const = default;
#else
bool operator==( AndroidHardwareBufferFormatResolvePropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( colorAttachmentFormat == rhs.colorAttachmentFormat );
#endif
}
bool operator!=( AndroidHardwareBufferFormatResolvePropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferFormatResolvePropertiesANDROID;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Format colorAttachmentFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
};
template <>
struct CppType<StructureType, StructureType::eAndroidHardwareBufferFormatResolvePropertiesANDROID>
{
using Type = AndroidHardwareBufferFormatResolvePropertiesANDROID;
};
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
#if defined( VK_USE_PLATFORM_ANDROID_KHR )
// wrapper struct for struct VkAndroidHardwareBufferPropertiesANDROID, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAndroidHardwareBufferPropertiesANDROID.html
struct AndroidHardwareBufferPropertiesANDROID
{
using NativeType = VkAndroidHardwareBufferPropertiesANDROID;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidHardwareBufferPropertiesANDROID;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR AndroidHardwareBufferPropertiesANDROID(VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ = {}, uint32_t memoryTypeBits_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, allocationSize{ allocationSize_ }, memoryTypeBits{ memoryTypeBits_ }
{}
VULKAN_HPP_CONSTEXPR AndroidHardwareBufferPropertiesANDROID( AndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AndroidHardwareBufferPropertiesANDROID( VkAndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
: AndroidHardwareBufferPropertiesANDROID( *reinterpret_cast<AndroidHardwareBufferPropertiesANDROID const *>( &rhs ) )
{}
AndroidHardwareBufferPropertiesANDROID & operator=( AndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
AndroidHardwareBufferPropertiesANDROID & operator=( VkAndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID const *>( &rhs );
return *this;
}
operator VkAndroidHardwareBufferPropertiesANDROID const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAndroidHardwareBufferPropertiesANDROID*>( this );
}
operator VkAndroidHardwareBufferPropertiesANDROID &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID*>( this );
}
operator VkAndroidHardwareBufferPropertiesANDROID const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkAndroidHardwareBufferPropertiesANDROID*>( this );
}
operator VkAndroidHardwareBufferPropertiesANDROID *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, allocationSize, memoryTypeBits );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( AndroidHardwareBufferPropertiesANDROID const & ) const = default;
#else
bool operator==( AndroidHardwareBufferPropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( allocationSize == rhs.allocationSize )
&& ( memoryTypeBits == rhs.memoryTypeBits );
#endif
}
bool operator!=( AndroidHardwareBufferPropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferPropertiesANDROID;
void * pNext = {};
VULKAN_HPP_NAMESPACE::DeviceSize allocationSize = {};
uint32_t memoryTypeBits = {};
};
template <>
struct CppType<StructureType, StructureType::eAndroidHardwareBufferPropertiesANDROID>
{
using Type = AndroidHardwareBufferPropertiesANDROID;
};
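// A minimal usage sketch, assuming `device` is a valid vk::Device with
// VK_ANDROID_external_memory_android_hardware_buffer enabled and `hardwareBuffer` is an
// AHardwareBuffer *; the format-properties structs above are read-only and are filled by
// chaining them behind this one (use AndroidHardwareBufferFormatProperties2ANDROID instead
// when 64-bit FormatFeatureFlags2 are wanted):
//
//   auto chain = device.getAndroidHardwareBufferPropertiesANDROID<
//                    vk::AndroidHardwareBufferPropertiesANDROID,
//                    vk::AndroidHardwareBufferFormatPropertiesANDROID >( *hardwareBuffer );
//   uint32_t memoryTypeBits = chain.get<vk::AndroidHardwareBufferPropertiesANDROID>().memoryTypeBits;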
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
#if defined( VK_USE_PLATFORM_ANDROID_KHR )
// wrapper struct for struct VkAndroidHardwareBufferUsageANDROID, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAndroidHardwareBufferUsageANDROID.html
struct AndroidHardwareBufferUsageANDROID
{
using NativeType = VkAndroidHardwareBufferUsageANDROID;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidHardwareBufferUsageANDROID;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR AndroidHardwareBufferUsageANDROID(uint64_t androidHardwareBufferUsage_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, androidHardwareBufferUsage{ androidHardwareBufferUsage_ }
{}
VULKAN_HPP_CONSTEXPR AndroidHardwareBufferUsageANDROID( AndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AndroidHardwareBufferUsageANDROID( VkAndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
: AndroidHardwareBufferUsageANDROID( *reinterpret_cast<AndroidHardwareBufferUsageANDROID const *>( &rhs ) )
{}
AndroidHardwareBufferUsageANDROID & operator=( AndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
AndroidHardwareBufferUsageANDROID & operator=( VkAndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferUsageANDROID const *>( &rhs );
return *this;
}
operator VkAndroidHardwareBufferUsageANDROID const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAndroidHardwareBufferUsageANDROID*>( this );
}
operator VkAndroidHardwareBufferUsageANDROID &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAndroidHardwareBufferUsageANDROID*>( this );
}
operator VkAndroidHardwareBufferUsageANDROID const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkAndroidHardwareBufferUsageANDROID*>( this );
}
operator VkAndroidHardwareBufferUsageANDROID *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkAndroidHardwareBufferUsageANDROID*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint64_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, androidHardwareBufferUsage );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( AndroidHardwareBufferUsageANDROID const & ) const = default;
#else
bool operator==( AndroidHardwareBufferUsageANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( androidHardwareBufferUsage == rhs.androidHardwareBufferUsage );
#endif
}
bool operator!=( AndroidHardwareBufferUsageANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferUsageANDROID;
void * pNext = {};
uint64_t androidHardwareBufferUsage = {};
};
template <>
struct CppType<StructureType, StructureType::eAndroidHardwareBufferUsageANDROID>
{
using Type = AndroidHardwareBufferUsageANDROID;
};
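// A minimal usage sketch, assuming `physicalDevice` is a valid vk::PhysicalDevice; this
// read-only struct extends VkImageFormatProperties2 and reports which AHardwareBuffer usage
// flags an exportable image of the given description needs:
//
//   vk::PhysicalDeviceExternalImageFormatInfo externalInfo{
//     vk::ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID };
//   auto formatInfo = vk::PhysicalDeviceImageFormatInfo2{}
//                       .setFormat( vk::Format::eR8G8B8A8Unorm )
//                       .setType( vk::ImageType::e2D )
//                       .setTiling( vk::ImageTiling::eOptimal )
//                       .setUsage( vk::ImageUsageFlagBits::eSampled )
//                       .setPNext( &externalInfo );
//   auto chain = physicalDevice.getImageFormatProperties2<
//                    vk::ImageFormatProperties2, vk::AndroidHardwareBufferUsageANDROID >( formatInfo );
//   uint64_t ahbUsage = chain.get<vk::AndroidHardwareBufferUsageANDROID>().androidHardwareBufferUsage;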
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
#if defined( VK_USE_PLATFORM_ANDROID_KHR )
// wrapper struct for struct VkAndroidSurfaceCreateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAndroidSurfaceCreateInfoKHR.html
struct AndroidSurfaceCreateInfoKHR
{
using NativeType = VkAndroidSurfaceCreateInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidSurfaceCreateInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR AndroidSurfaceCreateInfoKHR(VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags_ = {}, struct ANativeWindow * window_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, window{ window_ }
{}
VULKAN_HPP_CONSTEXPR AndroidSurfaceCreateInfoKHR( AndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AndroidSurfaceCreateInfoKHR( VkAndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: AndroidSurfaceCreateInfoKHR( *reinterpret_cast<AndroidSurfaceCreateInfoKHR const *>( &rhs ) )
{}
AndroidSurfaceCreateInfoKHR & operator=( AndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
AndroidSurfaceCreateInfoKHR & operator=( VkAndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 AndroidSurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AndroidSurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AndroidSurfaceCreateInfoKHR & setWindow( struct ANativeWindow * window_ ) VULKAN_HPP_NOEXCEPT
{
window = window_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkAndroidSurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR*>( this );
}
operator VkAndroidSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAndroidSurfaceCreateInfoKHR*>( this );
}
operator VkAndroidSurfaceCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR*>( this );
}
operator VkAndroidSurfaceCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkAndroidSurfaceCreateInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR const &, struct ANativeWindow * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, window );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( AndroidSurfaceCreateInfoKHR const & ) const = default;
#else
bool operator==( AndroidSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( window == rhs.window );
#endif
}
bool operator!=( AndroidSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidSurfaceCreateInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags = {};
struct ANativeWindow * window = {};
};
template <>
struct CppType<StructureType, StructureType::eAndroidSurfaceCreateInfoKHR>
{
using Type = AndroidSurfaceCreateInfoKHR;
};
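  // Usage sketch (illustrative only): create a presentation surface from a native Android window;
  // 'instance' (vk::Instance) and 'nativeWindow' (ANativeWindow *) are assumed to exist.
  //   vk::AndroidSurfaceCreateInfoKHR surfaceCreateInfo( {}, nativeWindow );
  //   vk::SurfaceKHR surface = instance.createAndroidSurfaceKHR( surfaceCreateInfo );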
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
// wrapper struct for struct VkAntiLagPresentationInfoAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAntiLagPresentationInfoAMD.html
struct AntiLagPresentationInfoAMD
{
using NativeType = VkAntiLagPresentationInfoAMD;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAntiLagPresentationInfoAMD;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR AntiLagPresentationInfoAMD(VULKAN_HPP_NAMESPACE::AntiLagStageAMD stage_ = VULKAN_HPP_NAMESPACE::AntiLagStageAMD::eInput, uint64_t frameIndex_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, stage{ stage_ }, frameIndex{ frameIndex_ }
{}
VULKAN_HPP_CONSTEXPR AntiLagPresentationInfoAMD( AntiLagPresentationInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AntiLagPresentationInfoAMD( VkAntiLagPresentationInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
: AntiLagPresentationInfoAMD( *reinterpret_cast<AntiLagPresentationInfoAMD const *>( &rhs ) )
{}
AntiLagPresentationInfoAMD & operator=( AntiLagPresentationInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
AntiLagPresentationInfoAMD & operator=( VkAntiLagPresentationInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AntiLagPresentationInfoAMD const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 AntiLagPresentationInfoAMD & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AntiLagPresentationInfoAMD & setStage( VULKAN_HPP_NAMESPACE::AntiLagStageAMD stage_ ) VULKAN_HPP_NOEXCEPT
{
stage = stage_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AntiLagPresentationInfoAMD & setFrameIndex( uint64_t frameIndex_ ) VULKAN_HPP_NOEXCEPT
{
frameIndex = frameIndex_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkAntiLagPresentationInfoAMD const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAntiLagPresentationInfoAMD*>( this );
}
operator VkAntiLagPresentationInfoAMD &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAntiLagPresentationInfoAMD*>( this );
}
operator VkAntiLagPresentationInfoAMD const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkAntiLagPresentationInfoAMD*>( this );
}
operator VkAntiLagPresentationInfoAMD *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkAntiLagPresentationInfoAMD*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::AntiLagStageAMD const &, uint64_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, stage, frameIndex );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( AntiLagPresentationInfoAMD const & ) const = default;
#else
bool operator==( AntiLagPresentationInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( stage == rhs.stage )
&& ( frameIndex == rhs.frameIndex );
#endif
}
bool operator!=( AntiLagPresentationInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAntiLagPresentationInfoAMD;
void * pNext = {};
VULKAN_HPP_NAMESPACE::AntiLagStageAMD stage = VULKAN_HPP_NAMESPACE::AntiLagStageAMD::eInput;
uint64_t frameIndex = {};
};
template <>
struct CppType<StructureType, StructureType::eAntiLagPresentationInfoAMD>
{
using Type = AntiLagPresentationInfoAMD;
};
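  // Note: per VK_AMD_anti_lag, frameIndex associates the AntiLagStageAMD::eInput marker with the
  // AntiLagStageAMD::ePresent marker of the same frame; this struct is consumed through
  // AntiLagDataAMD::pPresentationInfo (see the usage sketch after AntiLagDataAMD below).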
// wrapper struct for struct VkAntiLagDataAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAntiLagDataAMD.html
struct AntiLagDataAMD
{
using NativeType = VkAntiLagDataAMD;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAntiLagDataAMD;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR AntiLagDataAMD(VULKAN_HPP_NAMESPACE::AntiLagModeAMD mode_ = VULKAN_HPP_NAMESPACE::AntiLagModeAMD::eDriverControl, uint32_t maxFPS_ = {}, const VULKAN_HPP_NAMESPACE::AntiLagPresentationInfoAMD * pPresentationInfo_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, mode{ mode_ }, maxFPS{ maxFPS_ }, pPresentationInfo{ pPresentationInfo_ }
{}
VULKAN_HPP_CONSTEXPR AntiLagDataAMD( AntiLagDataAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AntiLagDataAMD( VkAntiLagDataAMD const & rhs ) VULKAN_HPP_NOEXCEPT
: AntiLagDataAMD( *reinterpret_cast<AntiLagDataAMD const *>( &rhs ) )
{}
AntiLagDataAMD & operator=( AntiLagDataAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
AntiLagDataAMD & operator=( VkAntiLagDataAMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AntiLagDataAMD const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 AntiLagDataAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AntiLagDataAMD & setMode( VULKAN_HPP_NAMESPACE::AntiLagModeAMD mode_ ) VULKAN_HPP_NOEXCEPT
{
mode = mode_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AntiLagDataAMD & setMaxFPS( uint32_t maxFPS_ ) VULKAN_HPP_NOEXCEPT
{
maxFPS = maxFPS_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AntiLagDataAMD & setPPresentationInfo( const VULKAN_HPP_NAMESPACE::AntiLagPresentationInfoAMD * pPresentationInfo_ ) VULKAN_HPP_NOEXCEPT
{
pPresentationInfo = pPresentationInfo_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkAntiLagDataAMD const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAntiLagDataAMD*>( this );
}
operator VkAntiLagDataAMD &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAntiLagDataAMD*>( this );
}
operator VkAntiLagDataAMD const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkAntiLagDataAMD*>( this );
}
operator VkAntiLagDataAMD *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkAntiLagDataAMD*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::AntiLagModeAMD const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::AntiLagPresentationInfoAMD * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, mode, maxFPS, pPresentationInfo );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( AntiLagDataAMD const & ) const = default;
#else
bool operator==( AntiLagDataAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( mode == rhs.mode )
&& ( maxFPS == rhs.maxFPS )
&& ( pPresentationInfo == rhs.pPresentationInfo );
#endif
}
bool operator!=( AntiLagDataAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAntiLagDataAMD;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::AntiLagModeAMD mode = VULKAN_HPP_NAMESPACE::AntiLagModeAMD::eDriverControl;
uint32_t maxFPS = {};
const VULKAN_HPP_NAMESPACE::AntiLagPresentationInfoAMD * pPresentationInfo = {};
};
template <>
struct CppType<StructureType, StructureType::eAntiLagDataAMD>
{
using Type = AntiLagDataAMD;
};
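  // Usage sketch (illustrative only; assumes a valid vk::Device 'device' created with
  // VK_AMD_anti_lag enabled and a per-frame counter 'frameIndex'):
  //   vk::AntiLagPresentationInfoAMD presentationInfo( vk::AntiLagStageAMD::eInput, frameIndex );
  //   vk::AntiLagDataAMD antiLagData( vk::AntiLagModeAMD::eOn, 0 /* maxFPS: 0 = no limit */, &presentationInfo );
  //   device.antiLagUpdateAMD( antiLagData );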
// wrapper struct for struct VkApplicationInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkApplicationInfo.html
struct ApplicationInfo
{
using NativeType = VkApplicationInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eApplicationInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ApplicationInfo(const char * pApplicationName_ = {}, uint32_t applicationVersion_ = {}, const char * pEngineName_ = {}, uint32_t engineVersion_ = {}, uint32_t apiVersion_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pApplicationName{ pApplicationName_ }, applicationVersion{ applicationVersion_ }, pEngineName{ pEngineName_ }, engineVersion{ engineVersion_ }, apiVersion{ apiVersion_ }
{}
VULKAN_HPP_CONSTEXPR ApplicationInfo( ApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ApplicationInfo( VkApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: ApplicationInfo( *reinterpret_cast<ApplicationInfo const *>( &rhs ) )
{}
ApplicationInfo & operator=( ApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ApplicationInfo & operator=( VkApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ApplicationInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ApplicationInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ApplicationInfo & setPApplicationName( const char * pApplicationName_ ) VULKAN_HPP_NOEXCEPT
{
pApplicationName = pApplicationName_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ApplicationInfo & setApplicationVersion( uint32_t applicationVersion_ ) VULKAN_HPP_NOEXCEPT
{
applicationVersion = applicationVersion_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ApplicationInfo & setPEngineName( const char * pEngineName_ ) VULKAN_HPP_NOEXCEPT
{
pEngineName = pEngineName_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ApplicationInfo & setEngineVersion( uint32_t engineVersion_ ) VULKAN_HPP_NOEXCEPT
{
engineVersion = engineVersion_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ApplicationInfo & setApiVersion( uint32_t apiVersion_ ) VULKAN_HPP_NOEXCEPT
{
apiVersion = apiVersion_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkApplicationInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkApplicationInfo*>( this );
}
operator VkApplicationInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkApplicationInfo*>( this );
}
operator VkApplicationInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkApplicationInfo*>( this );
}
operator VkApplicationInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkApplicationInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const char * const &, uint32_t const &, const char * const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pApplicationName, applicationVersion, pEngineName, engineVersion, apiVersion );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
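    // The string members are compared via strcmp (both here and in operator== below) only when
    // the pointers differ; equal pointers (including two nullptrs) compare equal, while comparing
    // a null string against a non-null one is undefined behaviour, so in that case both sides
    // must point at NUL-terminated strings.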
std::strong_ordering operator<=>( ApplicationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
      if ( pApplicationName != rhs.pApplicationName )
      {
        if ( auto cmp = strcmp( pApplicationName, rhs.pApplicationName ); cmp != 0 )
          return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
      }
      if ( auto cmp = applicationVersion <=> rhs.applicationVersion; cmp != 0 ) return cmp;
      if ( pEngineName != rhs.pEngineName )
      {
        if ( auto cmp = strcmp( pEngineName, rhs.pEngineName ); cmp != 0 )
          return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
      }
if ( auto cmp = engineVersion <=> rhs.engineVersion; cmp != 0 ) return cmp;
if ( auto cmp = apiVersion <=> rhs.apiVersion; cmp != 0 ) return cmp;
return std::strong_ordering::equivalent;
}
#endif
bool operator==( ApplicationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( ( pApplicationName == rhs.pApplicationName ) || ( strcmp( pApplicationName, rhs.pApplicationName ) == 0 ) )
&& ( applicationVersion == rhs.applicationVersion )
&& ( ( pEngineName == rhs.pEngineName ) || ( strcmp( pEngineName, rhs.pEngineName ) == 0 ) )
&& ( engineVersion == rhs.engineVersion )
&& ( apiVersion == rhs.apiVersion );
}
bool operator!=( ApplicationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eApplicationInfo;
const void * pNext = {};
const char * pApplicationName = {};
uint32_t applicationVersion = {};
const char * pEngineName = {};
uint32_t engineVersion = {};
uint32_t apiVersion = {};
};
template <>
struct CppType<StructureType, StructureType::eApplicationInfo>
{
using Type = ApplicationInfo;
};
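  // Usage sketch (illustrative only): the canonical first step of instance creation.
  //   vk::ApplicationInfo appInfo( "MyApp", 1, "MyEngine", 1, VK_API_VERSION_1_3 );
  //   vk::InstanceCreateInfo instanceCreateInfo( {}, &appInfo );
  //   vk::Instance instance = vk::createInstance( instanceCreateInfo );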
// wrapper struct for struct VkAttachmentDescription, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAttachmentDescription.html
struct AttachmentDescription
{
using NativeType = VkAttachmentDescription;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR AttachmentDescription(VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined) VULKAN_HPP_NOEXCEPT
: flags{ flags_ }, format{ format_ }, samples{ samples_ }, loadOp{ loadOp_ }, storeOp{ storeOp_ }, stencilLoadOp{ stencilLoadOp_ }, stencilStoreOp{ stencilStoreOp_ }, initialLayout{ initialLayout_ }, finalLayout{ finalLayout_ }
{}
VULKAN_HPP_CONSTEXPR AttachmentDescription( AttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AttachmentDescription( VkAttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT
: AttachmentDescription( *reinterpret_cast<AttachmentDescription const *>( &rhs ) )
{}
AttachmentDescription & operator=( AttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
AttachmentDescription & operator=( VkAttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentDescription const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setFlags( VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
{
format = format_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT
{
samples = samples_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ ) VULKAN_HPP_NOEXCEPT
{
loadOp = loadOp_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ ) VULKAN_HPP_NOEXCEPT
{
storeOp = storeOp_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setStencilLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ ) VULKAN_HPP_NOEXCEPT
{
stencilLoadOp = stencilLoadOp_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setStencilStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ ) VULKAN_HPP_NOEXCEPT
{
stencilStoreOp = stencilStoreOp_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ ) VULKAN_HPP_NOEXCEPT
{
initialLayout = initialLayout_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setFinalLayout( VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ ) VULKAN_HPP_NOEXCEPT
{
finalLayout = finalLayout_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkAttachmentDescription const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAttachmentDescription*>( this );
}
operator VkAttachmentDescription &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAttachmentDescription*>( this );
}
operator VkAttachmentDescription const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkAttachmentDescription*>( this );
}
operator VkAttachmentDescription *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkAttachmentDescription*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags const &, VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::SampleCountFlagBits const &, VULKAN_HPP_NAMESPACE::AttachmentLoadOp const &, VULKAN_HPP_NAMESPACE::AttachmentStoreOp const &, VULKAN_HPP_NAMESPACE::AttachmentLoadOp const &, VULKAN_HPP_NAMESPACE::AttachmentStoreOp const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, VULKAN_HPP_NAMESPACE::ImageLayout const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( flags, format, samples, loadOp, storeOp, stencilLoadOp, stencilStoreOp, initialLayout, finalLayout );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( AttachmentDescription const & ) const = default;
#else
bool operator==( AttachmentDescription const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( flags == rhs.flags )
&& ( format == rhs.format )
&& ( samples == rhs.samples )
&& ( loadOp == rhs.loadOp )
&& ( storeOp == rhs.storeOp )
&& ( stencilLoadOp == rhs.stencilLoadOp )
&& ( stencilStoreOp == rhs.stencilStoreOp )
&& ( initialLayout == rhs.initialLayout )
&& ( finalLayout == rhs.finalLayout );
#endif
}
bool operator!=( AttachmentDescription const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags = {};
VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad;
VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore;
VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad;
VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore;
VULKAN_HPP_NAMESPACE::ImageLayout initialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
VULKAN_HPP_NAMESPACE::ImageLayout finalLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
};
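  // Usage sketch (illustrative only): a color attachment that is cleared on load and kept for
  // presentation; 'swapchainFormat' is an assumed vk::Format.
  //   vk::AttachmentDescription colorAttachment(
  //     {}, swapchainFormat, vk::SampleCountFlagBits::e1,
  //     vk::AttachmentLoadOp::eClear, vk::AttachmentStoreOp::eStore,
  //     vk::AttachmentLoadOp::eDontCare, vk::AttachmentStoreOp::eDontCare,
  //     vk::ImageLayout::eUndefined, vk::ImageLayout::ePresentSrcKHR );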
// wrapper struct for struct VkAttachmentDescription2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAttachmentDescription2.html
struct AttachmentDescription2
{
using NativeType = VkAttachmentDescription2;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentDescription2;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR AttachmentDescription2(VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, format{ format_ }, samples{ samples_ }, loadOp{ loadOp_ }, storeOp{ storeOp_ }, stencilLoadOp{ stencilLoadOp_ }, stencilStoreOp{ stencilStoreOp_ }, initialLayout{ initialLayout_ }, finalLayout{ finalLayout_ }
{}
VULKAN_HPP_CONSTEXPR AttachmentDescription2( AttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AttachmentDescription2( VkAttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT
: AttachmentDescription2( *reinterpret_cast<AttachmentDescription2 const *>( &rhs ) )
{}
AttachmentDescription2 & operator=( AttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
AttachmentDescription2 & operator=( VkAttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentDescription2 const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setFlags( VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
{
format = format_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT
{
samples = samples_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ ) VULKAN_HPP_NOEXCEPT
{
loadOp = loadOp_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ ) VULKAN_HPP_NOEXCEPT
{
storeOp = storeOp_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setStencilLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ ) VULKAN_HPP_NOEXCEPT
{
stencilLoadOp = stencilLoadOp_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setStencilStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ ) VULKAN_HPP_NOEXCEPT
{
stencilStoreOp = stencilStoreOp_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ ) VULKAN_HPP_NOEXCEPT
{
initialLayout = initialLayout_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setFinalLayout( VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ ) VULKAN_HPP_NOEXCEPT
{
finalLayout = finalLayout_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkAttachmentDescription2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAttachmentDescription2*>( this );
}
operator VkAttachmentDescription2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAttachmentDescription2*>( this );
}
operator VkAttachmentDescription2 const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkAttachmentDescription2*>( this );
}
operator VkAttachmentDescription2 *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkAttachmentDescription2*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags const &, VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::SampleCountFlagBits const &, VULKAN_HPP_NAMESPACE::AttachmentLoadOp const &, VULKAN_HPP_NAMESPACE::AttachmentStoreOp const &, VULKAN_HPP_NAMESPACE::AttachmentLoadOp const &, VULKAN_HPP_NAMESPACE::AttachmentStoreOp const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, VULKAN_HPP_NAMESPACE::ImageLayout const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, format, samples, loadOp, storeOp, stencilLoadOp, stencilStoreOp, initialLayout, finalLayout );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( AttachmentDescription2 const & ) const = default;
#else
bool operator==( AttachmentDescription2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( format == rhs.format )
&& ( samples == rhs.samples )
&& ( loadOp == rhs.loadOp )
&& ( storeOp == rhs.storeOp )
&& ( stencilLoadOp == rhs.stencilLoadOp )
&& ( stencilStoreOp == rhs.stencilStoreOp )
&& ( initialLayout == rhs.initialLayout )
&& ( finalLayout == rhs.finalLayout );
#endif
}
bool operator!=( AttachmentDescription2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentDescription2;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags = {};
VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad;
VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore;
VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad;
VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore;
VULKAN_HPP_NAMESPACE::ImageLayout initialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
VULKAN_HPP_NAMESPACE::ImageLayout finalLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
};
template <>
struct CppType<StructureType, StructureType::eAttachmentDescription2>
{
using Type = AttachmentDescription2;
};
using AttachmentDescription2KHR = AttachmentDescription2;
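  // Usage sketch (illustrative only): the same kind of description via setter chaining, as
  // consumed by vk::RenderPassCreateInfo2 / vkCreateRenderPass2.
  //   auto depthAttachment = vk::AttachmentDescription2()
  //                            .setFormat( vk::Format::eD32Sfloat )
  //                            .setLoadOp( vk::AttachmentLoadOp::eClear )
  //                            .setStoreOp( vk::AttachmentStoreOp::eDontCare )
  //                            .setFinalLayout( vk::ImageLayout::eDepthStencilAttachmentOptimal );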
// wrapper struct for struct VkAttachmentDescriptionStencilLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAttachmentDescriptionStencilLayout.html
struct AttachmentDescriptionStencilLayout
{
using NativeType = VkAttachmentDescriptionStencilLayout;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentDescriptionStencilLayout;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR AttachmentDescriptionStencilLayout(VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, stencilInitialLayout{ stencilInitialLayout_ }, stencilFinalLayout{ stencilFinalLayout_ }
{}
VULKAN_HPP_CONSTEXPR AttachmentDescriptionStencilLayout( AttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AttachmentDescriptionStencilLayout( VkAttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT
: AttachmentDescriptionStencilLayout( *reinterpret_cast<AttachmentDescriptionStencilLayout const *>( &rhs ) )
{}
AttachmentDescriptionStencilLayout & operator=( AttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
AttachmentDescriptionStencilLayout & operator=( VkAttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayout const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 AttachmentDescriptionStencilLayout & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AttachmentDescriptionStencilLayout & setStencilInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout_ ) VULKAN_HPP_NOEXCEPT
{
stencilInitialLayout = stencilInitialLayout_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AttachmentDescriptionStencilLayout & setStencilFinalLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout_ ) VULKAN_HPP_NOEXCEPT
{
stencilFinalLayout = stencilFinalLayout_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkAttachmentDescriptionStencilLayout const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAttachmentDescriptionStencilLayout*>( this );
}
operator VkAttachmentDescriptionStencilLayout &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAttachmentDescriptionStencilLayout*>( this );
}
operator VkAttachmentDescriptionStencilLayout const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkAttachmentDescriptionStencilLayout*>( this );
}
operator VkAttachmentDescriptionStencilLayout *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkAttachmentDescriptionStencilLayout*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, VULKAN_HPP_NAMESPACE::ImageLayout const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, stencilInitialLayout, stencilFinalLayout );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( AttachmentDescriptionStencilLayout const & ) const = default;
#else
bool operator==( AttachmentDescriptionStencilLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( stencilInitialLayout == rhs.stencilInitialLayout )
&& ( stencilFinalLayout == rhs.stencilFinalLayout );
#endif
}
bool operator!=( AttachmentDescriptionStencilLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentDescriptionStencilLayout;
void * pNext = {};
VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
};
template <>
struct CppType<StructureType, StructureType::eAttachmentDescriptionStencilLayout>
{
using Type = AttachmentDescriptionStencilLayout;
};
using AttachmentDescriptionStencilLayoutKHR = AttachmentDescriptionStencilLayout;
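  // Usage sketch (illustrative only): separate stencil layouts hang off the pNext chain of an
  // AttachmentDescription2, e.g. via vk::StructureChain; 'depthAttachment' is assumed from above.
  //   vk::StructureChain<vk::AttachmentDescription2, vk::AttachmentDescriptionStencilLayout> chain(
  //     depthAttachment,
  //     vk::AttachmentDescriptionStencilLayout( vk::ImageLayout::eStencilAttachmentOptimal,
  //                                             vk::ImageLayout::eStencilReadOnlyOptimal ) );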
// wrapper struct for struct VkAttachmentFeedbackLoopInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAttachmentFeedbackLoopInfoEXT.html
struct AttachmentFeedbackLoopInfoEXT
{
using NativeType = VkAttachmentFeedbackLoopInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentFeedbackLoopInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR AttachmentFeedbackLoopInfoEXT(VULKAN_HPP_NAMESPACE::Bool32 feedbackLoopEnable_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, feedbackLoopEnable{ feedbackLoopEnable_ }
{}
VULKAN_HPP_CONSTEXPR AttachmentFeedbackLoopInfoEXT( AttachmentFeedbackLoopInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AttachmentFeedbackLoopInfoEXT( VkAttachmentFeedbackLoopInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: AttachmentFeedbackLoopInfoEXT( *reinterpret_cast<AttachmentFeedbackLoopInfoEXT const *>( &rhs ) )
{}
AttachmentFeedbackLoopInfoEXT & operator=( AttachmentFeedbackLoopInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
AttachmentFeedbackLoopInfoEXT & operator=( VkAttachmentFeedbackLoopInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentFeedbackLoopInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 AttachmentFeedbackLoopInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AttachmentFeedbackLoopInfoEXT & setFeedbackLoopEnable( VULKAN_HPP_NAMESPACE::Bool32 feedbackLoopEnable_ ) VULKAN_HPP_NOEXCEPT
{
feedbackLoopEnable = feedbackLoopEnable_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkAttachmentFeedbackLoopInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAttachmentFeedbackLoopInfoEXT*>( this );
}
operator VkAttachmentFeedbackLoopInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAttachmentFeedbackLoopInfoEXT*>( this );
}
operator VkAttachmentFeedbackLoopInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkAttachmentFeedbackLoopInfoEXT*>( this );
}
operator VkAttachmentFeedbackLoopInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkAttachmentFeedbackLoopInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, feedbackLoopEnable );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( AttachmentFeedbackLoopInfoEXT const & ) const = default;
#else
bool operator==( AttachmentFeedbackLoopInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( feedbackLoopEnable == rhs.feedbackLoopEnable );
#endif
}
bool operator!=( AttachmentFeedbackLoopInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentFeedbackLoopInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 feedbackLoopEnable = {};
};
template <>
struct CppType<StructureType, StructureType::eAttachmentFeedbackLoopInfoEXT>
{
using Type = AttachmentFeedbackLoopInfoEXT;
};
// wrapper struct for struct VkAttachmentReference, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAttachmentReference.html
struct AttachmentReference
{
using NativeType = VkAttachmentReference;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR AttachmentReference(uint32_t attachment_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout layout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined) VULKAN_HPP_NOEXCEPT
: attachment{ attachment_ }, layout{ layout_ }
{}
VULKAN_HPP_CONSTEXPR AttachmentReference( AttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AttachmentReference( VkAttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT
: AttachmentReference( *reinterpret_cast<AttachmentReference const *>( &rhs ) )
{}
AttachmentReference & operator=( AttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
AttachmentReference & operator=( VkAttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentReference const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 AttachmentReference & setAttachment( uint32_t attachment_ ) VULKAN_HPP_NOEXCEPT
{
attachment = attachment_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AttachmentReference & setLayout( VULKAN_HPP_NAMESPACE::ImageLayout layout_ ) VULKAN_HPP_NOEXCEPT
{
layout = layout_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkAttachmentReference const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAttachmentReference*>( this );
}
operator VkAttachmentReference &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAttachmentReference*>( this );
}
operator VkAttachmentReference const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkAttachmentReference*>( this );
}
operator VkAttachmentReference *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkAttachmentReference*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<uint32_t const &, VULKAN_HPP_NAMESPACE::ImageLayout const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( attachment, layout );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( AttachmentReference const & ) const = default;
#else
bool operator==( AttachmentReference const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( attachment == rhs.attachment )
&& ( layout == rhs.layout );
#endif
}
bool operator!=( AttachmentReference const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
uint32_t attachment = {};
VULKAN_HPP_NAMESPACE::ImageLayout layout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
};
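  // Usage sketch (illustrative only): reference attachment 0 of the render pass as the color
  // target of a subpass.
  //   vk::AttachmentReference colorRef( 0, vk::ImageLayout::eColorAttachmentOptimal );
  //   vk::SubpassDescription subpass( {}, vk::PipelineBindPoint::eGraphics, 0, nullptr, 1, &colorRef );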
// wrapper struct for struct VkAttachmentReference2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAttachmentReference2.html
struct AttachmentReference2
{
using NativeType = VkAttachmentReference2;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentReference2;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR AttachmentReference2(uint32_t attachment_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout layout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, attachment{ attachment_ }, layout{ layout_ }, aspectMask{ aspectMask_ }
{}
VULKAN_HPP_CONSTEXPR AttachmentReference2( AttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AttachmentReference2( VkAttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT
: AttachmentReference2( *reinterpret_cast<AttachmentReference2 const *>( &rhs ) )
{}
AttachmentReference2 & operator=( AttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
AttachmentReference2 & operator=( VkAttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentReference2 const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 AttachmentReference2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AttachmentReference2 & setAttachment( uint32_t attachment_ ) VULKAN_HPP_NOEXCEPT
{
attachment = attachment_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AttachmentReference2 & setLayout( VULKAN_HPP_NAMESPACE::ImageLayout layout_ ) VULKAN_HPP_NOEXCEPT
{
layout = layout_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AttachmentReference2 & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
{
aspectMask = aspectMask_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkAttachmentReference2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAttachmentReference2*>( this );
}
operator VkAttachmentReference2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAttachmentReference2*>( this );
}
operator VkAttachmentReference2 const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkAttachmentReference2*>( this );
}
operator VkAttachmentReference2 *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkAttachmentReference2*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, VULKAN_HPP_NAMESPACE::ImageAspectFlags const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, attachment, layout, aspectMask );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( AttachmentReference2 const & ) const = default;
#else
bool operator==( AttachmentReference2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( attachment == rhs.attachment )
&& ( layout == rhs.layout )
&& ( aspectMask == rhs.aspectMask );
#endif
}
bool operator!=( AttachmentReference2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentReference2;
const void * pNext = {};
uint32_t attachment = {};
VULKAN_HPP_NAMESPACE::ImageLayout layout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
};
template <>
struct CppType<StructureType, StructureType::eAttachmentReference2>
{
using Type = AttachmentReference2;
};
using AttachmentReference2KHR = AttachmentReference2;
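  // Usage sketch (illustrative only): unlike AttachmentReference, aspectMask selects which
  // aspects an input attachment may read.
  //   vk::AttachmentReference2 inputRef( 0, vk::ImageLayout::eShaderReadOnlyOptimal,
  //                                      vk::ImageAspectFlagBits::eColor );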
// wrapper struct for struct VkAttachmentReferenceStencilLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAttachmentReferenceStencilLayout.html
struct AttachmentReferenceStencilLayout
{
using NativeType = VkAttachmentReferenceStencilLayout;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentReferenceStencilLayout;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR AttachmentReferenceStencilLayout(VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, stencilLayout{ stencilLayout_ }
{}
VULKAN_HPP_CONSTEXPR AttachmentReferenceStencilLayout( AttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AttachmentReferenceStencilLayout( VkAttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT
: AttachmentReferenceStencilLayout( *reinterpret_cast<AttachmentReferenceStencilLayout const *>( &rhs ) )
{}
AttachmentReferenceStencilLayout & operator=( AttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
AttachmentReferenceStencilLayout & operator=( VkAttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentReferenceStencilLayout const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 AttachmentReferenceStencilLayout & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AttachmentReferenceStencilLayout & setStencilLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout_ ) VULKAN_HPP_NOEXCEPT
{
stencilLayout = stencilLayout_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkAttachmentReferenceStencilLayout const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAttachmentReferenceStencilLayout*>( this );
}
operator VkAttachmentReferenceStencilLayout &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAttachmentReferenceStencilLayout*>( this );
}
operator VkAttachmentReferenceStencilLayout const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkAttachmentReferenceStencilLayout*>( this );
}
operator VkAttachmentReferenceStencilLayout *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkAttachmentReferenceStencilLayout*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ImageLayout const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, stencilLayout );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( AttachmentReferenceStencilLayout const & ) const = default;
#else
bool operator==( AttachmentReferenceStencilLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( stencilLayout == rhs.stencilLayout );
#endif
}
bool operator!=( AttachmentReferenceStencilLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentReferenceStencilLayout;
void * pNext = {};
VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
};
template <>
struct CppType<StructureType, StructureType::eAttachmentReferenceStencilLayout>
{
using Type = AttachmentReferenceStencilLayout;
};
using AttachmentReferenceStencilLayoutKHR = AttachmentReferenceStencilLayout;
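  // Note: analogous to AttachmentDescriptionStencilLayout above, this struct hangs off the pNext
  // chain of an AttachmentReference2 to give the stencil aspect its own image layout.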
// wrapper struct for struct VkAttachmentSampleCountInfoAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAttachmentSampleCountInfoAMD.html
struct AttachmentSampleCountInfoAMD
{
using NativeType = VkAttachmentSampleCountInfoAMD;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentSampleCountInfoAMD;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR AttachmentSampleCountInfoAMD(uint32_t colorAttachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::SampleCountFlagBits * pColorAttachmentSamples_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlagBits depthStencilAttachmentSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, colorAttachmentCount{ colorAttachmentCount_ }, pColorAttachmentSamples{ pColorAttachmentSamples_ }, depthStencilAttachmentSamples{ depthStencilAttachmentSamples_ }
{}
VULKAN_HPP_CONSTEXPR AttachmentSampleCountInfoAMD( AttachmentSampleCountInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AttachmentSampleCountInfoAMD( VkAttachmentSampleCountInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
: AttachmentSampleCountInfoAMD( *reinterpret_cast<AttachmentSampleCountInfoAMD const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
AttachmentSampleCountInfoAMD( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SampleCountFlagBits> const & colorAttachmentSamples_, VULKAN_HPP_NAMESPACE::SampleCountFlagBits depthStencilAttachmentSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, const void * pNext_ = nullptr )
: pNext( pNext_ ), colorAttachmentCount( static_cast<uint32_t>( colorAttachmentSamples_.size() ) ), pColorAttachmentSamples( colorAttachmentSamples_.data() ), depthStencilAttachmentSamples( depthStencilAttachmentSamples_ )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
AttachmentSampleCountInfoAMD & operator=( AttachmentSampleCountInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
AttachmentSampleCountInfoAMD & operator=( VkAttachmentSampleCountInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentSampleCountInfoAMD const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 AttachmentSampleCountInfoAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AttachmentSampleCountInfoAMD & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
{
colorAttachmentCount = colorAttachmentCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AttachmentSampleCountInfoAMD & setPColorAttachmentSamples( const VULKAN_HPP_NAMESPACE::SampleCountFlagBits * pColorAttachmentSamples_ ) VULKAN_HPP_NOEXCEPT
{
pColorAttachmentSamples = pColorAttachmentSamples_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
AttachmentSampleCountInfoAMD & setColorAttachmentSamples( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SampleCountFlagBits> const & colorAttachmentSamples_ ) VULKAN_HPP_NOEXCEPT
{
colorAttachmentCount = static_cast<uint32_t>( colorAttachmentSamples_.size() );
pColorAttachmentSamples = colorAttachmentSamples_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 AttachmentSampleCountInfoAMD & setDepthStencilAttachmentSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits depthStencilAttachmentSamples_ ) VULKAN_HPP_NOEXCEPT
{
depthStencilAttachmentSamples = depthStencilAttachmentSamples_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkAttachmentSampleCountInfoAMD const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAttachmentSampleCountInfoAMD*>( this );
}
operator VkAttachmentSampleCountInfoAMD &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAttachmentSampleCountInfoAMD*>( this );
}
operator VkAttachmentSampleCountInfoAMD const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkAttachmentSampleCountInfoAMD*>( this );
}
operator VkAttachmentSampleCountInfoAMD *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkAttachmentSampleCountInfoAMD*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::SampleCountFlagBits * const &, VULKAN_HPP_NAMESPACE::SampleCountFlagBits const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, colorAttachmentCount, pColorAttachmentSamples, depthStencilAttachmentSamples );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( AttachmentSampleCountInfoAMD const & ) const = default;
#else
bool operator==( AttachmentSampleCountInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( colorAttachmentCount == rhs.colorAttachmentCount )
&& ( pColorAttachmentSamples == rhs.pColorAttachmentSamples )
&& ( depthStencilAttachmentSamples == rhs.depthStencilAttachmentSamples );
#endif
}
bool operator!=( AttachmentSampleCountInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentSampleCountInfoAMD;
const void * pNext = {};
uint32_t colorAttachmentCount = {};
const VULKAN_HPP_NAMESPACE::SampleCountFlagBits * pColorAttachmentSamples = {};
VULKAN_HPP_NAMESPACE::SampleCountFlagBits depthStencilAttachmentSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
};
template <>
struct CppType<StructureType, StructureType::eAttachmentSampleCountInfoAMD>
{
using Type = AttachmentSampleCountInfoAMD;
};
using AttachmentSampleCountInfoNV = AttachmentSampleCountInfoAMD;
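  // Usage sketch (illustrative only): with dynamic rendering, per-attachment sample counts are
  // supplied through the pNext chain of vk::CommandBufferInheritanceInfo or
  // vk::GraphicsPipelineCreateInfo; the ArrayProxy constructor fills
  // colorAttachmentCount / pColorAttachmentSamples from a container.
  //   std::array<vk::SampleCountFlagBits, 1> colorSamples{ vk::SampleCountFlagBits::e4 };
  //   vk::AttachmentSampleCountInfoAMD sampleCountInfo( colorSamples, vk::SampleCountFlagBits::e4 );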
// wrapper struct for struct VkExtent2D, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExtent2D.html
struct Extent2D
{
using NativeType = VkExtent2D;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR Extent2D(uint32_t width_ = {}, uint32_t height_ = {}) VULKAN_HPP_NOEXCEPT
: width{ width_ }, height{ height_ }
{}
VULKAN_HPP_CONSTEXPR Extent2D( Extent2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
Extent2D( VkExtent2D const & rhs ) VULKAN_HPP_NOEXCEPT
: Extent2D( *reinterpret_cast<Extent2D const *>( &rhs ) )
{}
Extent2D & operator=( Extent2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
Extent2D & operator=( VkExtent2D const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Extent2D const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 Extent2D & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
{
width = width_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 Extent2D & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
{
height = height_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkExtent2D const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkExtent2D*>( this );
}
operator VkExtent2D &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkExtent2D*>( this );
}
operator VkExtent2D const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkExtent2D*>( this );
}
operator VkExtent2D *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkExtent2D*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( width, height );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( Extent2D const & ) const = default;
#else
bool operator==( Extent2D const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( width == rhs.width )
&& ( height == rhs.height );
#endif
}
bool operator!=( Extent2D const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
uint32_t width = {};
uint32_t height = {};
};
// wrapper struct for struct VkSampleLocationEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSampleLocationEXT.html
struct SampleLocationEXT
{
using NativeType = VkSampleLocationEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SampleLocationEXT(float x_ = {}, float y_ = {}) VULKAN_HPP_NOEXCEPT
: x{ x_ }, y{ y_ }
{}
VULKAN_HPP_CONSTEXPR SampleLocationEXT( SampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SampleLocationEXT( VkSampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: SampleLocationEXT( *reinterpret_cast<SampleLocationEXT const *>( &rhs ) )
{}
SampleLocationEXT & operator=( SampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SampleLocationEXT & operator=( VkSampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SampleLocationEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 SampleLocationEXT & setX( float x_ ) VULKAN_HPP_NOEXCEPT
{
x = x_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SampleLocationEXT & setY( float y_ ) VULKAN_HPP_NOEXCEPT
{
y = y_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkSampleLocationEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSampleLocationEXT*>( this );
}
operator VkSampleLocationEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSampleLocationEXT*>( this );
}
operator VkSampleLocationEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSampleLocationEXT*>( this );
}
operator VkSampleLocationEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSampleLocationEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<float const &, float const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( x, y );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SampleLocationEXT const & ) const = default;
#else
bool operator==( SampleLocationEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( x == rhs.x )
&& ( y == rhs.y );
#endif
}
bool operator!=( SampleLocationEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
float x = {};
float y = {};
};
// wrapper struct for struct VkSampleLocationsInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSampleLocationsInfoEXT.html
struct SampleLocationsInfoEXT
{
using NativeType = VkSampleLocationsInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSampleLocationsInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SampleLocationsInfoEXT(VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize_ = {}, uint32_t sampleLocationsCount_ = {}, const VULKAN_HPP_NAMESPACE::SampleLocationEXT * pSampleLocations_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, sampleLocationsPerPixel{ sampleLocationsPerPixel_ }, sampleLocationGridSize{ sampleLocationGridSize_ }, sampleLocationsCount{ sampleLocationsCount_ }, pSampleLocations{ pSampleLocations_ }
{}
VULKAN_HPP_CONSTEXPR SampleLocationsInfoEXT( SampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SampleLocationsInfoEXT( VkSampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: SampleLocationsInfoEXT( *reinterpret_cast<SampleLocationsInfoEXT const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
SampleLocationsInfoEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_, VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SampleLocationEXT> const & sampleLocations_, const void * pNext_ = nullptr )
: pNext( pNext_ ), sampleLocationsPerPixel( sampleLocationsPerPixel_ ), sampleLocationGridSize( sampleLocationGridSize_ ), sampleLocationsCount( static_cast<uint32_t>( sampleLocations_.size() ) ), pSampleLocations( sampleLocations_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
SampleLocationsInfoEXT & operator=( SampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SampleLocationsInfoEXT & operator=( VkSampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT & setSampleLocationsPerPixel( VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_ ) VULKAN_HPP_NOEXCEPT
{
sampleLocationsPerPixel = sampleLocationsPerPixel_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT & setSampleLocationGridSize( VULKAN_HPP_NAMESPACE::Extent2D const & sampleLocationGridSize_ ) VULKAN_HPP_NOEXCEPT
{
sampleLocationGridSize = sampleLocationGridSize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT & setSampleLocationsCount( uint32_t sampleLocationsCount_ ) VULKAN_HPP_NOEXCEPT
{
sampleLocationsCount = sampleLocationsCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT & setPSampleLocations( const VULKAN_HPP_NAMESPACE::SampleLocationEXT * pSampleLocations_ ) VULKAN_HPP_NOEXCEPT
{
pSampleLocations = pSampleLocations_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
SampleLocationsInfoEXT & setSampleLocations( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SampleLocationEXT> const & sampleLocations_ ) VULKAN_HPP_NOEXCEPT
{
sampleLocationsCount = static_cast<uint32_t>( sampleLocations_.size() );
pSampleLocations = sampleLocations_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkSampleLocationsInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSampleLocationsInfoEXT*>( this );
}
operator VkSampleLocationsInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSampleLocationsInfoEXT*>( this );
}
operator VkSampleLocationsInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSampleLocationsInfoEXT*>( this );
}
operator VkSampleLocationsInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSampleLocationsInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SampleCountFlagBits const &, VULKAN_HPP_NAMESPACE::Extent2D const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::SampleLocationEXT * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, sampleLocationsPerPixel, sampleLocationGridSize, sampleLocationsCount, pSampleLocations );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SampleLocationsInfoEXT const & ) const = default;
#else
bool operator==( SampleLocationsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( sampleLocationsPerPixel == rhs.sampleLocationsPerPixel )
&& ( sampleLocationGridSize == rhs.sampleLocationGridSize )
&& ( sampleLocationsCount == rhs.sampleLocationsCount )
&& ( pSampleLocations == rhs.pSampleLocations );
#endif
}
bool operator!=( SampleLocationsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSampleLocationsInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize = {};
uint32_t sampleLocationsCount = {};
const VULKAN_HPP_NAMESPACE::SampleLocationEXT * pSampleLocations = {};
};
template <>
struct CppType<StructureType, StructureType::eSampleLocationsInfoEXT>
{
using Type = SampleLocationsInfoEXT;
};
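  // Usage sketch (illustrative only; assumes VULKAN_HPP_DISABLE_ENHANCED_MODE is not defined):
  // the ArrayProxyNoTemporaries constructor above derives sampleLocationsCount and
  // pSampleLocations from a caller-owned array, which therefore must outlive the struct, e.g.
  //   std::array<VULKAN_HPP_NAMESPACE::SampleLocationEXT, 2> locations{ { { 0.25f, 0.25f }, { 0.75f, 0.75f } } };
  //   VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT info{ VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e2,
  //                                                      VULKAN_HPP_NAMESPACE::Extent2D{ 1, 1 },
  //                                                      locations };
  // Here two locations match a 1x1 grid at two samples per pixel.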
// wrapper struct for struct VkAttachmentSampleLocationsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAttachmentSampleLocationsEXT.html
struct AttachmentSampleLocationsEXT
{
using NativeType = VkAttachmentSampleLocationsEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR AttachmentSampleLocationsEXT(uint32_t attachmentIndex_ = {}, VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {}) VULKAN_HPP_NOEXCEPT
: attachmentIndex{ attachmentIndex_ }, sampleLocationsInfo{ sampleLocationsInfo_ }
{}
VULKAN_HPP_CONSTEXPR AttachmentSampleLocationsEXT( AttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AttachmentSampleLocationsEXT( VkAttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: AttachmentSampleLocationsEXT( *reinterpret_cast<AttachmentSampleLocationsEXT const *>( &rhs ) )
{}
AttachmentSampleLocationsEXT & operator=( AttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
AttachmentSampleLocationsEXT & operator=( VkAttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 AttachmentSampleLocationsEXT & setAttachmentIndex( uint32_t attachmentIndex_ ) VULKAN_HPP_NOEXCEPT
{
attachmentIndex = attachmentIndex_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AttachmentSampleLocationsEXT & setSampleLocationsInfo( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT
{
sampleLocationsInfo = sampleLocationsInfo_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkAttachmentSampleLocationsEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAttachmentSampleLocationsEXT*>( this );
}
operator VkAttachmentSampleLocationsEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAttachmentSampleLocationsEXT*>( this );
}
operator VkAttachmentSampleLocationsEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkAttachmentSampleLocationsEXT*>( this );
}
operator VkAttachmentSampleLocationsEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkAttachmentSampleLocationsEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<uint32_t const &, VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( attachmentIndex, sampleLocationsInfo );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( AttachmentSampleLocationsEXT const & ) const = default;
#else
bool operator==( AttachmentSampleLocationsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( attachmentIndex == rhs.attachmentIndex )
&& ( sampleLocationsInfo == rhs.sampleLocationsInfo );
#endif
}
bool operator!=( AttachmentSampleLocationsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
uint32_t attachmentIndex = {};
VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo = {};
};
// wrapper struct for struct VkBaseInStructure, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBaseInStructure.html
struct BaseInStructure
{
using NativeType = VkBaseInStructure;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
BaseInStructure(VULKAN_HPP_NAMESPACE::StructureType sType_ = VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo, const struct VULKAN_HPP_NAMESPACE::BaseInStructure * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: sType{ sType_ }, pNext{ pNext_ }
{}
BaseInStructure( BaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BaseInStructure( VkBaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT
: BaseInStructure( *reinterpret_cast<BaseInStructure const *>( &rhs ) )
{}
BaseInStructure & operator=( BaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
BaseInStructure & operator=( VkBaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BaseInStructure const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 BaseInStructure & setPNext( const struct VULKAN_HPP_NAMESPACE::BaseInStructure * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkBaseInStructure const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBaseInStructure*>( this );
}
operator VkBaseInStructure &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBaseInStructure*>( this );
}
operator VkBaseInStructure const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkBaseInStructure*>( this );
}
operator VkBaseInStructure *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkBaseInStructure*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const struct VULKAN_HPP_NAMESPACE::BaseInStructure * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( BaseInStructure const & ) const = default;
#else
bool operator==( BaseInStructure const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext );
#endif
}
bool operator!=( BaseInStructure const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo;
const struct VULKAN_HPP_NAMESPACE::BaseInStructure * pNext = {};
};
// wrapper struct for struct VkBaseOutStructure, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBaseOutStructure.html
struct BaseOutStructure
{
using NativeType = VkBaseOutStructure;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
BaseOutStructure(VULKAN_HPP_NAMESPACE::StructureType sType_ = VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo, struct VULKAN_HPP_NAMESPACE::BaseOutStructure * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: sType{ sType_ }, pNext{ pNext_ }
{}
BaseOutStructure( BaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BaseOutStructure( VkBaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT
: BaseOutStructure( *reinterpret_cast<BaseOutStructure const *>( &rhs ) )
{}
BaseOutStructure & operator=( BaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
BaseOutStructure & operator=( VkBaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BaseOutStructure const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 BaseOutStructure & setPNext( struct VULKAN_HPP_NAMESPACE::BaseOutStructure * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkBaseOutStructure const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBaseOutStructure*>( this );
}
operator VkBaseOutStructure &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBaseOutStructure*>( this );
}
operator VkBaseOutStructure const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkBaseOutStructure*>( this );
}
operator VkBaseOutStructure *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkBaseOutStructure*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, struct VULKAN_HPP_NAMESPACE::BaseOutStructure * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( BaseOutStructure const & ) const = default;
#else
bool operator==( BaseOutStructure const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext );
#endif
}
bool operator!=( BaseOutStructure const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo;
struct VULKAN_HPP_NAMESPACE::BaseOutStructure * pNext = {};
};
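  // Usage sketch (illustrative only): BaseInStructure and BaseOutStructure exist so generic
  // code can walk a pNext chain without knowing the concrete extension structs, e.g.
  //   for ( auto const * node = reinterpret_cast<VULKAN_HPP_NAMESPACE::BaseInStructure const *>( chainHead );
  //         node != nullptr;
  //         node = node->pNext )
  //   {
  //     // inspect node->sType here
  //   }
  // where chainHead is a hypothetical const void * to the first struct in the chain,
  // supplied by the caller.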
// wrapper struct for struct VkBindAccelerationStructureMemoryInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindAccelerationStructureMemoryInfoNV.html
struct BindAccelerationStructureMemoryInfoNV
{
using NativeType = VkBindAccelerationStructureMemoryInfoNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindAccelerationStructureMemoryInfoNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR BindAccelerationStructureMemoryInfoNV(VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ = {}, VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, uint32_t deviceIndexCount_ = {}, const uint32_t * pDeviceIndices_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, accelerationStructure{ accelerationStructure_ }, memory{ memory_ }, memoryOffset{ memoryOffset_ }, deviceIndexCount{ deviceIndexCount_ }, pDeviceIndices{ pDeviceIndices_ }
{}
VULKAN_HPP_CONSTEXPR BindAccelerationStructureMemoryInfoNV( BindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BindAccelerationStructureMemoryInfoNV( VkBindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: BindAccelerationStructureMemoryInfoNV( *reinterpret_cast<BindAccelerationStructureMemoryInfoNV const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
BindAccelerationStructureMemoryInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_, VULKAN_HPP_NAMESPACE::DeviceMemory memory_, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_, const void * pNext_ = nullptr )
: pNext( pNext_ ), accelerationStructure( accelerationStructure_ ), memory( memory_ ), memoryOffset( memoryOffset_ ), deviceIndexCount( static_cast<uint32_t>( deviceIndices_.size() ) ), pDeviceIndices( deviceIndices_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
BindAccelerationStructureMemoryInfoNV & operator=( BindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
BindAccelerationStructureMemoryInfoNV & operator=( VkBindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV & setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
{
accelerationStructure = accelerationStructure_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
{
memory = memory_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
{
memoryOffset = memoryOffset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV & setDeviceIndexCount( uint32_t deviceIndexCount_ ) VULKAN_HPP_NOEXCEPT
{
deviceIndexCount = deviceIndexCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV & setPDeviceIndices( const uint32_t * pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
{
pDeviceIndices = pDeviceIndices_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
BindAccelerationStructureMemoryInfoNV & setDeviceIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_ ) VULKAN_HPP_NOEXCEPT
{
deviceIndexCount = static_cast<uint32_t>( deviceIndices_.size() );
pDeviceIndices = deviceIndices_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkBindAccelerationStructureMemoryInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV*>( this );
}
operator VkBindAccelerationStructureMemoryInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBindAccelerationStructureMemoryInfoNV*>( this );
}
operator VkBindAccelerationStructureMemoryInfoNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV*>( this );
}
operator VkBindAccelerationStructureMemoryInfoNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkBindAccelerationStructureMemoryInfoNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::AccelerationStructureNV const &, VULKAN_HPP_NAMESPACE::DeviceMemory const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, uint32_t const &, const uint32_t * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, accelerationStructure, memory, memoryOffset, deviceIndexCount, pDeviceIndices );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( BindAccelerationStructureMemoryInfoNV const & ) const = default;
#else
bool operator==( BindAccelerationStructureMemoryInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( accelerationStructure == rhs.accelerationStructure )
&& ( memory == rhs.memory )
&& ( memoryOffset == rhs.memoryOffset )
&& ( deviceIndexCount == rhs.deviceIndexCount )
&& ( pDeviceIndices == rhs.pDeviceIndices );
#endif
}
bool operator!=( BindAccelerationStructureMemoryInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindAccelerationStructureMemoryInfoNV;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure = {};
VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {};
uint32_t deviceIndexCount = {};
const uint32_t * pDeviceIndices = {};
};
template <>
struct CppType<StructureType, StructureType::eBindAccelerationStructureMemoryInfoNV>
{
using Type = BindAccelerationStructureMemoryInfoNV;
};
// wrapper struct for struct VkBindBufferMemoryDeviceGroupInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindBufferMemoryDeviceGroupInfo.html
struct BindBufferMemoryDeviceGroupInfo
{
using NativeType = VkBindBufferMemoryDeviceGroupInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindBufferMemoryDeviceGroupInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR BindBufferMemoryDeviceGroupInfo(uint32_t deviceIndexCount_ = {}, const uint32_t * pDeviceIndices_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, deviceIndexCount{ deviceIndexCount_ }, pDeviceIndices{ pDeviceIndices_ }
{}
VULKAN_HPP_CONSTEXPR BindBufferMemoryDeviceGroupInfo( BindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BindBufferMemoryDeviceGroupInfo( VkBindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: BindBufferMemoryDeviceGroupInfo( *reinterpret_cast<BindBufferMemoryDeviceGroupInfo const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
BindBufferMemoryDeviceGroupInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_, const void * pNext_ = nullptr )
: pNext( pNext_ ), deviceIndexCount( static_cast<uint32_t>( deviceIndices_.size() ) ), pDeviceIndices( deviceIndices_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
BindBufferMemoryDeviceGroupInfo & operator=( BindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
BindBufferMemoryDeviceGroupInfo & operator=( VkBindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryDeviceGroupInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryDeviceGroupInfo & setDeviceIndexCount( uint32_t deviceIndexCount_ ) VULKAN_HPP_NOEXCEPT
{
deviceIndexCount = deviceIndexCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryDeviceGroupInfo & setPDeviceIndices( const uint32_t * pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
{
pDeviceIndices = pDeviceIndices_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
BindBufferMemoryDeviceGroupInfo & setDeviceIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_ ) VULKAN_HPP_NOEXCEPT
{
deviceIndexCount = static_cast<uint32_t>( deviceIndices_.size() );
pDeviceIndices = deviceIndices_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkBindBufferMemoryDeviceGroupInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBindBufferMemoryDeviceGroupInfo*>( this );
}
operator VkBindBufferMemoryDeviceGroupInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBindBufferMemoryDeviceGroupInfo*>( this );
}
operator VkBindBufferMemoryDeviceGroupInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkBindBufferMemoryDeviceGroupInfo*>( this );
}
operator VkBindBufferMemoryDeviceGroupInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkBindBufferMemoryDeviceGroupInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const uint32_t * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, deviceIndexCount, pDeviceIndices );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( BindBufferMemoryDeviceGroupInfo const & ) const = default;
#else
bool operator==( BindBufferMemoryDeviceGroupInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( deviceIndexCount == rhs.deviceIndexCount )
&& ( pDeviceIndices == rhs.pDeviceIndices );
#endif
}
bool operator!=( BindBufferMemoryDeviceGroupInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindBufferMemoryDeviceGroupInfo;
const void * pNext = {};
uint32_t deviceIndexCount = {};
const uint32_t * pDeviceIndices = {};
};
template <>
struct CppType<StructureType, StructureType::eBindBufferMemoryDeviceGroupInfo>
{
using Type = BindBufferMemoryDeviceGroupInfo;
};
using BindBufferMemoryDeviceGroupInfoKHR = BindBufferMemoryDeviceGroupInfo;
// wrapper struct for struct VkBindBufferMemoryInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindBufferMemoryInfo.html
struct BindBufferMemoryInfo
{
using NativeType = VkBindBufferMemoryInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindBufferMemoryInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR BindBufferMemoryInfo(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, buffer{ buffer_ }, memory{ memory_ }, memoryOffset{ memoryOffset_ }
{}
VULKAN_HPP_CONSTEXPR BindBufferMemoryInfo( BindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BindBufferMemoryInfo( VkBindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: BindBufferMemoryInfo( *reinterpret_cast<BindBufferMemoryInfo const *>( &rhs ) )
{}
BindBufferMemoryInfo & operator=( BindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
BindBufferMemoryInfo & operator=( VkBindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
{
buffer = buffer_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryInfo & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
{
memory = memory_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryInfo & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
{
memoryOffset = memoryOffset_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkBindBufferMemoryInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBindBufferMemoryInfo*>( this );
}
operator VkBindBufferMemoryInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBindBufferMemoryInfo*>( this );
}
operator VkBindBufferMemoryInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkBindBufferMemoryInfo*>( this );
}
operator VkBindBufferMemoryInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkBindBufferMemoryInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Buffer const &, VULKAN_HPP_NAMESPACE::DeviceMemory const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, buffer, memory, memoryOffset );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( BindBufferMemoryInfo const & ) const = default;
#else
bool operator==( BindBufferMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( buffer == rhs.buffer )
&& ( memory == rhs.memory )
&& ( memoryOffset == rhs.memoryOffset );
#endif
}
bool operator!=( BindBufferMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindBufferMemoryInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Buffer buffer = {};
VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {};
};
template <>
struct CppType<StructureType, StructureType::eBindBufferMemoryInfo>
{
using Type = BindBufferMemoryInfo;
};
using BindBufferMemoryInfoKHR = BindBufferMemoryInfo;
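  // Usage sketch (illustrative only; assumes the enhanced-mode Device wrapper): each
  // BindBufferMemoryInfo carries one buffer/memory binding, so several bindings can be
  // submitted through a single vkBindBufferMemory2 call, e.g.
  //   VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo bindInfo{ buffer, memory, /*memoryOffset=*/0 };
  //   device.bindBufferMemory2( bindInfo );
  // where buffer, memory, and device are hypothetical handles created elsewhere.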
// wrapper struct for struct VkBindDescriptorBufferEmbeddedSamplersInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindDescriptorBufferEmbeddedSamplersInfoEXT.html
struct BindDescriptorBufferEmbeddedSamplersInfoEXT
{
using NativeType = VkBindDescriptorBufferEmbeddedSamplersInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindDescriptorBufferEmbeddedSamplersInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR BindDescriptorBufferEmbeddedSamplersInfoEXT(VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, uint32_t set_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, stageFlags{ stageFlags_ }, layout{ layout_ }, set{ set_ }
{}
VULKAN_HPP_CONSTEXPR BindDescriptorBufferEmbeddedSamplersInfoEXT( BindDescriptorBufferEmbeddedSamplersInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BindDescriptorBufferEmbeddedSamplersInfoEXT( VkBindDescriptorBufferEmbeddedSamplersInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: BindDescriptorBufferEmbeddedSamplersInfoEXT( *reinterpret_cast<BindDescriptorBufferEmbeddedSamplersInfoEXT const *>( &rhs ) )
{}
BindDescriptorBufferEmbeddedSamplersInfoEXT & operator=( BindDescriptorBufferEmbeddedSamplersInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
BindDescriptorBufferEmbeddedSamplersInfoEXT & operator=( VkBindDescriptorBufferEmbeddedSamplersInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindDescriptorBufferEmbeddedSamplersInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 BindDescriptorBufferEmbeddedSamplersInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BindDescriptorBufferEmbeddedSamplersInfoEXT & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT
{
stageFlags = stageFlags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BindDescriptorBufferEmbeddedSamplersInfoEXT & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
{
layout = layout_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BindDescriptorBufferEmbeddedSamplersInfoEXT & setSet( uint32_t set_ ) VULKAN_HPP_NOEXCEPT
{
set = set_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkBindDescriptorBufferEmbeddedSamplersInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBindDescriptorBufferEmbeddedSamplersInfoEXT*>( this );
}
operator VkBindDescriptorBufferEmbeddedSamplersInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBindDescriptorBufferEmbeddedSamplersInfoEXT*>( this );
}
operator VkBindDescriptorBufferEmbeddedSamplersInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkBindDescriptorBufferEmbeddedSamplersInfoEXT*>( this );
}
operator VkBindDescriptorBufferEmbeddedSamplersInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkBindDescriptorBufferEmbeddedSamplersInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ShaderStageFlags const &, VULKAN_HPP_NAMESPACE::PipelineLayout const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, stageFlags, layout, set );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( BindDescriptorBufferEmbeddedSamplersInfoEXT const & ) const = default;
#else
bool operator==( BindDescriptorBufferEmbeddedSamplersInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( stageFlags == rhs.stageFlags )
&& ( layout == rhs.layout )
&& ( set == rhs.set );
#endif
}
bool operator!=( BindDescriptorBufferEmbeddedSamplersInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindDescriptorBufferEmbeddedSamplersInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {};
VULKAN_HPP_NAMESPACE::PipelineLayout layout = {};
uint32_t set = {};
};
template <>
struct CppType<StructureType, StructureType::eBindDescriptorBufferEmbeddedSamplersInfoEXT>
{
using Type = BindDescriptorBufferEmbeddedSamplersInfoEXT;
};
// wrapper struct for struct VkBindDescriptorSetsInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindDescriptorSetsInfo.html
struct BindDescriptorSetsInfo
{
using NativeType = VkBindDescriptorSetsInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindDescriptorSetsInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR BindDescriptorSetsInfo(VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, uint32_t firstSet_ = {}, uint32_t descriptorSetCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets_ = {}, uint32_t dynamicOffsetCount_ = {}, const uint32_t * pDynamicOffsets_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, stageFlags{ stageFlags_ }, layout{ layout_ }, firstSet{ firstSet_ }, descriptorSetCount{ descriptorSetCount_ }, pDescriptorSets{ pDescriptorSets_ }, dynamicOffsetCount{ dynamicOffsetCount_ }, pDynamicOffsets{ pDynamicOffsets_ }
{}
VULKAN_HPP_CONSTEXPR BindDescriptorSetsInfo( BindDescriptorSetsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BindDescriptorSetsInfo( VkBindDescriptorSetsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: BindDescriptorSetsInfo( *reinterpret_cast<BindDescriptorSetsInfo const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
BindDescriptorSetsInfo( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_, VULKAN_HPP_NAMESPACE::PipelineLayout layout_, uint32_t firstSet_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & dynamicOffsets_ = {}, const void * pNext_ = nullptr )
: pNext( pNext_ ), stageFlags( stageFlags_ ), layout( layout_ ), firstSet( firstSet_ ), descriptorSetCount( static_cast<uint32_t>( descriptorSets_.size() ) ), pDescriptorSets( descriptorSets_.data() ), dynamicOffsetCount( static_cast<uint32_t>( dynamicOffsets_.size() ) ), pDynamicOffsets( dynamicOffsets_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
BindDescriptorSetsInfo & operator=( BindDescriptorSetsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
BindDescriptorSetsInfo & operator=( VkBindDescriptorSetsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfo & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT
{
stageFlags = stageFlags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfo & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
{
layout = layout_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfo & setFirstSet( uint32_t firstSet_ ) VULKAN_HPP_NOEXCEPT
{
firstSet = firstSet_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfo & setDescriptorSetCount( uint32_t descriptorSetCount_ ) VULKAN_HPP_NOEXCEPT
{
descriptorSetCount = descriptorSetCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfo & setPDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets_ ) VULKAN_HPP_NOEXCEPT
{
pDescriptorSets = pDescriptorSets_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
BindDescriptorSetsInfo & setDescriptorSets( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets_ ) VULKAN_HPP_NOEXCEPT
{
descriptorSetCount = static_cast<uint32_t>( descriptorSets_.size() );
pDescriptorSets = descriptorSets_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfo & setDynamicOffsetCount( uint32_t dynamicOffsetCount_ ) VULKAN_HPP_NOEXCEPT
{
dynamicOffsetCount = dynamicOffsetCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfo & setPDynamicOffsets( const uint32_t * pDynamicOffsets_ ) VULKAN_HPP_NOEXCEPT
{
pDynamicOffsets = pDynamicOffsets_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
BindDescriptorSetsInfo & setDynamicOffsets( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & dynamicOffsets_ ) VULKAN_HPP_NOEXCEPT
{
dynamicOffsetCount = static_cast<uint32_t>( dynamicOffsets_.size() );
pDynamicOffsets = dynamicOffsets_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkBindDescriptorSetsInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBindDescriptorSetsInfo*>( this );
}
operator VkBindDescriptorSetsInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBindDescriptorSetsInfo*>( this );
}
operator VkBindDescriptorSetsInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkBindDescriptorSetsInfo*>( this );
}
operator VkBindDescriptorSetsInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkBindDescriptorSetsInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ShaderStageFlags const &, VULKAN_HPP_NAMESPACE::PipelineLayout const &, uint32_t const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::DescriptorSet * const &, uint32_t const &, const uint32_t * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, stageFlags, layout, firstSet, descriptorSetCount, pDescriptorSets, dynamicOffsetCount, pDynamicOffsets );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( BindDescriptorSetsInfo const & ) const = default;
#else
bool operator==( BindDescriptorSetsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( stageFlags == rhs.stageFlags )
&& ( layout == rhs.layout )
&& ( firstSet == rhs.firstSet )
&& ( descriptorSetCount == rhs.descriptorSetCount )
&& ( pDescriptorSets == rhs.pDescriptorSets )
&& ( dynamicOffsetCount == rhs.dynamicOffsetCount )
&& ( pDynamicOffsets == rhs.pDynamicOffsets );
#endif
}
bool operator!=( BindDescriptorSetsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindDescriptorSetsInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {};
VULKAN_HPP_NAMESPACE::PipelineLayout layout = {};
uint32_t firstSet = {};
uint32_t descriptorSetCount = {};
const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets = {};
uint32_t dynamicOffsetCount = {};
const uint32_t * pDynamicOffsets = {};
};
template <>
struct CppType<StructureType, StructureType::eBindDescriptorSetsInfo>
{
using Type = BindDescriptorSetsInfo;
};
using BindDescriptorSetsInfoKHR = BindDescriptorSetsInfo;
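  // Usage sketch (illustrative only; assumes VULKAN_HPP_DISABLE_ENHANCED_MODE is not defined):
  // the setDescriptorSets / setDynamicOffsets overloads above keep the count and pointer
  // members in sync automatically, e.g.
  //   std::array<VULKAN_HPP_NAMESPACE::DescriptorSet, 2> sets = { set0, set1 };
  //   auto info = VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfo{}
  //                 .setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eCompute )
  //                 .setLayout( pipelineLayout )
  //                 .setDescriptorSets( sets );
  // where set0, set1, and pipelineLayout are hypothetical handles created elsewhere.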
// wrapper struct for struct VkOffset2D, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkOffset2D.html
struct Offset2D
{
using NativeType = VkOffset2D;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR Offset2D(int32_t x_ = {}, int32_t y_ = {}) VULKAN_HPP_NOEXCEPT
: x{ x_ }, y{ y_ }
{}
VULKAN_HPP_CONSTEXPR Offset2D( Offset2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
Offset2D( VkOffset2D const & rhs ) VULKAN_HPP_NOEXCEPT
: Offset2D( *reinterpret_cast<Offset2D const *>( &rhs ) )
{}
Offset2D & operator=( Offset2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
Offset2D & operator=( VkOffset2D const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Offset2D const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 Offset2D & setX( int32_t x_ ) VULKAN_HPP_NOEXCEPT
{
x = x_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 Offset2D & setY( int32_t y_ ) VULKAN_HPP_NOEXCEPT
{
y = y_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkOffset2D const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkOffset2D*>( this );
}
operator VkOffset2D &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkOffset2D*>( this );
}
operator VkOffset2D const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkOffset2D*>( this );
}
operator VkOffset2D *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkOffset2D*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<int32_t const &, int32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( x, y );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( Offset2D const & ) const = default;
#else
bool operator==( Offset2D const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( x == rhs.x )
&& ( y == rhs.y );
#endif
}
bool operator!=( Offset2D const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
int32_t x = {};
int32_t y = {};
};
// wrapper struct for struct VkRect2D, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRect2D.html
struct Rect2D
{
using NativeType = VkRect2D;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR Rect2D(VULKAN_HPP_NAMESPACE::Offset2D offset_ = {}, VULKAN_HPP_NAMESPACE::Extent2D extent_ = {}) VULKAN_HPP_NOEXCEPT
: offset{ offset_ }, extent{ extent_ }
{}
VULKAN_HPP_CONSTEXPR Rect2D( Rect2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
Rect2D( VkRect2D const & rhs ) VULKAN_HPP_NOEXCEPT
: Rect2D( *reinterpret_cast<Rect2D const *>( &rhs ) )
{}
Rect2D & operator=( Rect2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
Rect2D & operator=( VkRect2D const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Rect2D const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 Rect2D & setOffset( VULKAN_HPP_NAMESPACE::Offset2D const & offset_ ) VULKAN_HPP_NOEXCEPT
{
offset = offset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 Rect2D & setExtent( VULKAN_HPP_NAMESPACE::Extent2D const & extent_ ) VULKAN_HPP_NOEXCEPT
{
extent = extent_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkRect2D const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRect2D*>( this );
}
operator VkRect2D &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkRect2D*>( this );
}
operator VkRect2D const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkRect2D*>( this );
}
operator VkRect2D *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkRect2D*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::Offset2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( offset, extent );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( Rect2D const & ) const = default;
#else
bool operator==( Rect2D const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( offset == rhs.offset )
&& ( extent == rhs.extent );
#endif
}
bool operator!=( Rect2D const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::Offset2D offset = {};
VULKAN_HPP_NAMESPACE::Extent2D extent = {};
};
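  // Usage sketch (illustrative only): Rect2D composes the Offset2D and Extent2D wrappers
  // above, so e.g. a full-window scissor rectangle can be written as
  //   VULKAN_HPP_NAMESPACE::Rect2D scissor{ VULKAN_HPP_NAMESPACE::Offset2D{ 0, 0 },
  //                                         VULKAN_HPP_NAMESPACE::Extent2D{ 1920, 1080 } };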
// wrapper struct for struct VkBindImageMemoryDeviceGroupInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindImageMemoryDeviceGroupInfo.html
struct BindImageMemoryDeviceGroupInfo
{
using NativeType = VkBindImageMemoryDeviceGroupInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImageMemoryDeviceGroupInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR BindImageMemoryDeviceGroupInfo(uint32_t deviceIndexCount_ = {}, const uint32_t * pDeviceIndices_ = {}, uint32_t splitInstanceBindRegionCount_ = {}, const VULKAN_HPP_NAMESPACE::Rect2D * pSplitInstanceBindRegions_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, deviceIndexCount{ deviceIndexCount_ }, pDeviceIndices{ pDeviceIndices_ }, splitInstanceBindRegionCount{ splitInstanceBindRegionCount_ }, pSplitInstanceBindRegions{ pSplitInstanceBindRegions_ }
{}
VULKAN_HPP_CONSTEXPR BindImageMemoryDeviceGroupInfo( BindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BindImageMemoryDeviceGroupInfo( VkBindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: BindImageMemoryDeviceGroupInfo( *reinterpret_cast<BindImageMemoryDeviceGroupInfo const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
BindImageMemoryDeviceGroupInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & splitInstanceBindRegions_ = {}, const void * pNext_ = nullptr )
: pNext( pNext_ ), deviceIndexCount( static_cast<uint32_t>( deviceIndices_.size() ) ), pDeviceIndices( deviceIndices_.data() ), splitInstanceBindRegionCount( static_cast<uint32_t>( splitInstanceBindRegions_.size() ) ), pSplitInstanceBindRegions( splitInstanceBindRegions_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
BindImageMemoryDeviceGroupInfo & operator=( BindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
BindImageMemoryDeviceGroupInfo & operator=( VkBindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindImageMemoryDeviceGroupInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo & setDeviceIndexCount( uint32_t deviceIndexCount_ ) VULKAN_HPP_NOEXCEPT
{
deviceIndexCount = deviceIndexCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo & setPDeviceIndices( const uint32_t * pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
{
pDeviceIndices = pDeviceIndices_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
BindImageMemoryDeviceGroupInfo & setDeviceIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_ ) VULKAN_HPP_NOEXCEPT
{
deviceIndexCount = static_cast<uint32_t>( deviceIndices_.size() );
pDeviceIndices = deviceIndices_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo & setSplitInstanceBindRegionCount( uint32_t splitInstanceBindRegionCount_ ) VULKAN_HPP_NOEXCEPT
{
splitInstanceBindRegionCount = splitInstanceBindRegionCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo & setPSplitInstanceBindRegions( const VULKAN_HPP_NAMESPACE::Rect2D * pSplitInstanceBindRegions_ ) VULKAN_HPP_NOEXCEPT
{
pSplitInstanceBindRegions = pSplitInstanceBindRegions_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
BindImageMemoryDeviceGroupInfo & setSplitInstanceBindRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & splitInstanceBindRegions_ ) VULKAN_HPP_NOEXCEPT
{
splitInstanceBindRegionCount = static_cast<uint32_t>( splitInstanceBindRegions_.size() );
pSplitInstanceBindRegions = splitInstanceBindRegions_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkBindImageMemoryDeviceGroupInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBindImageMemoryDeviceGroupInfo*>( this );
}
operator VkBindImageMemoryDeviceGroupInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBindImageMemoryDeviceGroupInfo*>( this );
}
operator VkBindImageMemoryDeviceGroupInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkBindImageMemoryDeviceGroupInfo*>( this );
}
operator VkBindImageMemoryDeviceGroupInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkBindImageMemoryDeviceGroupInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const uint32_t * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Rect2D * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, deviceIndexCount, pDeviceIndices, splitInstanceBindRegionCount, pSplitInstanceBindRegions );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( BindImageMemoryDeviceGroupInfo const & ) const = default;
#else
bool operator==( BindImageMemoryDeviceGroupInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( deviceIndexCount == rhs.deviceIndexCount )
&& ( pDeviceIndices == rhs.pDeviceIndices )
&& ( splitInstanceBindRegionCount == rhs.splitInstanceBindRegionCount )
&& ( pSplitInstanceBindRegions == rhs.pSplitInstanceBindRegions );
#endif
}
bool operator!=( BindImageMemoryDeviceGroupInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImageMemoryDeviceGroupInfo;
const void * pNext = {};
uint32_t deviceIndexCount = {};
const uint32_t * pDeviceIndices = {};
uint32_t splitInstanceBindRegionCount = {};
const VULKAN_HPP_NAMESPACE::Rect2D * pSplitInstanceBindRegions = {};
};
template <>
struct CppType<StructureType, StructureType::eBindImageMemoryDeviceGroupInfo>
{
using Type = BindImageMemoryDeviceGroupInfo;
};
using BindImageMemoryDeviceGroupInfoKHR = BindImageMemoryDeviceGroupInfo;
// wrapper struct for struct VkBindImageMemoryInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindImageMemoryInfo.html
struct BindImageMemoryInfo
{
using NativeType = VkBindImageMemoryInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImageMemoryInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR BindImageMemoryInfo(VULKAN_HPP_NAMESPACE::Image image_ = {}, VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, image{ image_ }, memory{ memory_ }, memoryOffset{ memoryOffset_ }
{}
VULKAN_HPP_CONSTEXPR BindImageMemoryInfo( BindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BindImageMemoryInfo( VkBindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: BindImageMemoryInfo( *reinterpret_cast<BindImageMemoryInfo const *>( &rhs ) )
{}
BindImageMemoryInfo & operator=( BindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
BindImageMemoryInfo & operator=( VkBindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindImageMemoryInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 BindImageMemoryInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BindImageMemoryInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
{
image = image_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BindImageMemoryInfo & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
{
memory = memory_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BindImageMemoryInfo & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
{
memoryOffset = memoryOffset_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkBindImageMemoryInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBindImageMemoryInfo*>( this );
}
operator VkBindImageMemoryInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBindImageMemoryInfo*>( this );
}
operator VkBindImageMemoryInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkBindImageMemoryInfo*>( this );
}
operator VkBindImageMemoryInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkBindImageMemoryInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Image const &, VULKAN_HPP_NAMESPACE::DeviceMemory const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, image, memory, memoryOffset );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( BindImageMemoryInfo const & ) const = default;
#else
bool operator==( BindImageMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( image == rhs.image )
&& ( memory == rhs.memory )
&& ( memoryOffset == rhs.memoryOffset );
#endif
}
bool operator!=( BindImageMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImageMemoryInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Image image = {};
VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {};
};
template <>
struct CppType<StructureType, StructureType::eBindImageMemoryInfo>
{
using Type = BindImageMemoryInfo;
};
using BindImageMemoryInfoKHR = BindImageMemoryInfo;
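// Illustrative usage sketch (not generated code): bind device memory to an image via
// the batched Vulkan 1.1 path. A valid vk::Device `device`, vk::Image `image`, and
// vk::DeviceMemory `memory` are assumptions; the call mirrors vkBindImageMemory2.
//
//   vk::BindImageMemoryInfo bindInfo = vk::BindImageMemoryInfo{}
//                                        .setImage( image )
//                                        .setMemory( memory )
//                                        .setMemoryOffset( 0 );
//   device.bindImageMemory2( bindInfo );  // accepts a single info or an array of them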
// wrapper struct for struct VkBindImageMemorySwapchainInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindImageMemorySwapchainInfoKHR.html
struct BindImageMemorySwapchainInfoKHR
{
using NativeType = VkBindImageMemorySwapchainInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImageMemorySwapchainInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR BindImageMemorySwapchainInfoKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {}, uint32_t imageIndex_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, swapchain{ swapchain_ }, imageIndex{ imageIndex_ }
{}
VULKAN_HPP_CONSTEXPR BindImageMemorySwapchainInfoKHR( BindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BindImageMemorySwapchainInfoKHR( VkBindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: BindImageMemorySwapchainInfoKHR( *reinterpret_cast<BindImageMemorySwapchainInfoKHR const *>( &rhs ) )
{}
BindImageMemorySwapchainInfoKHR & operator=( BindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
BindImageMemorySwapchainInfoKHR & operator=( VkBindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindImageMemorySwapchainInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 BindImageMemorySwapchainInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BindImageMemorySwapchainInfoKHR & setSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT
{
swapchain = swapchain_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BindImageMemorySwapchainInfoKHR & setImageIndex( uint32_t imageIndex_ ) VULKAN_HPP_NOEXCEPT
{
imageIndex = imageIndex_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkBindImageMemorySwapchainInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBindImageMemorySwapchainInfoKHR*>( this );
}
operator VkBindImageMemorySwapchainInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBindImageMemorySwapchainInfoKHR*>( this );
}
operator VkBindImageMemorySwapchainInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkBindImageMemorySwapchainInfoKHR*>( this );
}
operator VkBindImageMemorySwapchainInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkBindImageMemorySwapchainInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SwapchainKHR const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, swapchain, imageIndex );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( BindImageMemorySwapchainInfoKHR const & ) const = default;
#else
bool operator==( BindImageMemorySwapchainInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( swapchain == rhs.swapchain )
&& ( imageIndex == rhs.imageIndex );
#endif
}
bool operator!=( BindImageMemorySwapchainInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImageMemorySwapchainInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {};
uint32_t imageIndex = {};
};
template <>
struct CppType<StructureType, StructureType::eBindImageMemorySwapchainInfoKHR>
{
using Type = BindImageMemorySwapchainInfoKHR;
};
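// Illustrative usage sketch (not generated code): for an image created with
// VkImageSwapchainCreateInfoKHR, chaining this struct binds it to presentable image
// `imageIndex` of `swapchain` (both assumptions); the memory handle in the outer
// BindImageMemoryInfo must then stay VK_NULL_HANDLE.
//
//   vk::BindImageMemorySwapchainInfoKHR swapchainBind{ swapchain, imageIndex };
//   vk::BindImageMemoryInfo bindInfo{ image, {}, 0, &swapchainBind };  // memory left null
//   device.bindImageMemory2( bindInfo );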
// wrapper struct for struct VkBindImagePlaneMemoryInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindImagePlaneMemoryInfo.html
struct BindImagePlaneMemoryInfo
{
using NativeType = VkBindImagePlaneMemoryInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImagePlaneMemoryInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR BindImagePlaneMemoryInfo(VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, planeAspect{ planeAspect_ }
{}
VULKAN_HPP_CONSTEXPR BindImagePlaneMemoryInfo( BindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BindImagePlaneMemoryInfo( VkBindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: BindImagePlaneMemoryInfo( *reinterpret_cast<BindImagePlaneMemoryInfo const *>( &rhs ) )
{}
BindImagePlaneMemoryInfo & operator=( BindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
BindImagePlaneMemoryInfo & operator=( VkBindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 BindImagePlaneMemoryInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BindImagePlaneMemoryInfo & setPlaneAspect( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ ) VULKAN_HPP_NOEXCEPT
{
planeAspect = planeAspect_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkBindImagePlaneMemoryInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBindImagePlaneMemoryInfo*>( this );
}
operator VkBindImagePlaneMemoryInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBindImagePlaneMemoryInfo*>( this );
}
operator VkBindImagePlaneMemoryInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkBindImagePlaneMemoryInfo*>( this );
}
operator VkBindImagePlaneMemoryInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkBindImagePlaneMemoryInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageAspectFlagBits const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, planeAspect );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( BindImagePlaneMemoryInfo const & ) const = default;
#else
bool operator==( BindImagePlaneMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( planeAspect == rhs.planeAspect );
#endif
}
bool operator!=( BindImagePlaneMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImagePlaneMemoryInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor;
};
template <>
struct CppType<StructureType, StructureType::eBindImagePlaneMemoryInfo>
{
using Type = BindImagePlaneMemoryInfo;
};
using BindImagePlaneMemoryInfoKHR = BindImagePlaneMemoryInfo;
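// Illustrative usage sketch (not generated code): for a disjoint multi-planar image,
// each plane is bound separately by chaining this struct; `image` and `plane0Memory`
// are assumptions, and one BindImageMemoryInfo is submitted per plane.
//
//   vk::BindImagePlaneMemoryInfo planeInfo{ vk::ImageAspectFlagBits::ePlane0 };
//   vk::BindImageMemoryInfo bindInfo{ image, plane0Memory, 0, &planeInfo };
//   device.bindImageMemory2( bindInfo );  // repeat with ePlane1 (and ePlane2) as needed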
// wrapper struct for struct VkBindIndexBufferIndirectCommandEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindIndexBufferIndirectCommandEXT.html
struct BindIndexBufferIndirectCommandEXT
{
using NativeType = VkBindIndexBufferIndirectCommandEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR BindIndexBufferIndirectCommandEXT(VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ = {}, uint32_t size_ = {}, VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16) VULKAN_HPP_NOEXCEPT
: bufferAddress{ bufferAddress_ }, size{ size_ }, indexType{ indexType_ }
{}
VULKAN_HPP_CONSTEXPR BindIndexBufferIndirectCommandEXT( BindIndexBufferIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BindIndexBufferIndirectCommandEXT( VkBindIndexBufferIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: BindIndexBufferIndirectCommandEXT( *reinterpret_cast<BindIndexBufferIndirectCommandEXT const *>( &rhs ) )
{}
BindIndexBufferIndirectCommandEXT & operator=( BindIndexBufferIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
BindIndexBufferIndirectCommandEXT & operator=( VkBindIndexBufferIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindIndexBufferIndirectCommandEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 BindIndexBufferIndirectCommandEXT & setBufferAddress( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ ) VULKAN_HPP_NOEXCEPT
{
bufferAddress = bufferAddress_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BindIndexBufferIndirectCommandEXT & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT
{
size = size_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BindIndexBufferIndirectCommandEXT & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT
{
indexType = indexType_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkBindIndexBufferIndirectCommandEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBindIndexBufferIndirectCommandEXT*>( this );
}
operator VkBindIndexBufferIndirectCommandEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBindIndexBufferIndirectCommandEXT*>( this );
}
operator VkBindIndexBufferIndirectCommandEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkBindIndexBufferIndirectCommandEXT*>( this );
}
operator VkBindIndexBufferIndirectCommandEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkBindIndexBufferIndirectCommandEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::DeviceAddress const &, uint32_t const &, VULKAN_HPP_NAMESPACE::IndexType const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( bufferAddress, size, indexType );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( BindIndexBufferIndirectCommandEXT const & ) const = default;
#else
bool operator==( BindIndexBufferIndirectCommandEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( bufferAddress == rhs.bufferAddress )
&& ( size == rhs.size )
&& ( indexType == rhs.indexType );
#endif
}
bool operator!=( BindIndexBufferIndirectCommandEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress = {};
uint32_t size = {};
VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16;
};
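// Illustrative usage sketch (not generated code): this token is not passed to an API
// entry point; the host (or a shader) writes it into a device-generated-commands
// stream. The NV variant below has the identical layout. `mapped` (a pointer into a
// mapped stream buffer) and `indexBufferAddress` are assumptions of this sketch.
//
//   vk::BindIndexBufferIndirectCommandEXT token{ indexBufferAddress,
//                                                indexDataSizeInBytes,
//                                                vk::IndexType::eUint32 };
//   std::memcpy( mapped, &token, sizeof( token ) );  // layout matches the C struct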
// wrapper struct for struct VkBindIndexBufferIndirectCommandNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindIndexBufferIndirectCommandNV.html
struct BindIndexBufferIndirectCommandNV
{
using NativeType = VkBindIndexBufferIndirectCommandNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR BindIndexBufferIndirectCommandNV(VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ = {}, uint32_t size_ = {}, VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16) VULKAN_HPP_NOEXCEPT
: bufferAddress{ bufferAddress_ }, size{ size_ }, indexType{ indexType_ }
{}
VULKAN_HPP_CONSTEXPR BindIndexBufferIndirectCommandNV( BindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BindIndexBufferIndirectCommandNV( VkBindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
: BindIndexBufferIndirectCommandNV( *reinterpret_cast<BindIndexBufferIndirectCommandNV const *>( &rhs ) )
{}
BindIndexBufferIndirectCommandNV & operator=( BindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
BindIndexBufferIndirectCommandNV & operator=( VkBindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindIndexBufferIndirectCommandNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 BindIndexBufferIndirectCommandNV & setBufferAddress( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ ) VULKAN_HPP_NOEXCEPT
{
bufferAddress = bufferAddress_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BindIndexBufferIndirectCommandNV & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT
{
size = size_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BindIndexBufferIndirectCommandNV & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT
{
indexType = indexType_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkBindIndexBufferIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBindIndexBufferIndirectCommandNV*>( this );
}
operator VkBindIndexBufferIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBindIndexBufferIndirectCommandNV*>( this );
}
operator VkBindIndexBufferIndirectCommandNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkBindIndexBufferIndirectCommandNV*>( this );
}
operator VkBindIndexBufferIndirectCommandNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkBindIndexBufferIndirectCommandNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::DeviceAddress const &, uint32_t const &, VULKAN_HPP_NAMESPACE::IndexType const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( bufferAddress, size, indexType );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( BindIndexBufferIndirectCommandNV const & ) const = default;
#else
bool operator==( BindIndexBufferIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( bufferAddress == rhs.bufferAddress )
&& ( size == rhs.size )
&& ( indexType == rhs.indexType );
#endif
}
bool operator!=( BindIndexBufferIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress = {};
uint32_t size = {};
VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16;
};
// wrapper struct for struct VkBindMemoryStatus, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindMemoryStatus.html
struct BindMemoryStatus
{
using NativeType = VkBindMemoryStatus;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindMemoryStatus;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR BindMemoryStatus(VULKAN_HPP_NAMESPACE::Result * pResult_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pResult{ pResult_ }
{}
VULKAN_HPP_CONSTEXPR BindMemoryStatus( BindMemoryStatus const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BindMemoryStatus( VkBindMemoryStatus const & rhs ) VULKAN_HPP_NOEXCEPT
: BindMemoryStatus( *reinterpret_cast<BindMemoryStatus const *>( &rhs ) )
{}
BindMemoryStatus & operator=( BindMemoryStatus const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
BindMemoryStatus & operator=( VkBindMemoryStatus const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindMemoryStatus const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 BindMemoryStatus & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BindMemoryStatus & setPResult( VULKAN_HPP_NAMESPACE::Result * pResult_ ) VULKAN_HPP_NOEXCEPT
{
pResult = pResult_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkBindMemoryStatus const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBindMemoryStatus*>( this );
}
operator VkBindMemoryStatus &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBindMemoryStatus*>( this );
}
operator VkBindMemoryStatus const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkBindMemoryStatus*>( this );
}
operator VkBindMemoryStatus *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkBindMemoryStatus*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Result * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pResult );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( BindMemoryStatus const & ) const = default;
#else
bool operator==( BindMemoryStatus const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pResult == rhs.pResult );
#endif
}
bool operator!=( BindMemoryStatus const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindMemoryStatus;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Result * pResult = {};
};
template <>
struct CppType<StructureType, StructureType::eBindMemoryStatus>
{
using Type = BindMemoryStatus;
};
using BindMemoryStatusKHR = BindMemoryStatus;
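// Illustrative usage sketch (not generated code): with maintenance6, chaining
// BindMemoryStatus into a BindImageMemoryInfo (or BindBufferMemoryInfo) yields a
// per-bind result; `pResult` must point at storage that outlives the call, and
// `image` / `memory` are assumptions.
//
//   vk::Result bindResult = vk::Result::eSuccess;
//   vk::BindMemoryStatus status{ &bindResult };
//   vk::BindImageMemoryInfo bindInfo{ image, memory, 0, &status };
//   device.bindImageMemory2( bindInfo );
//   // bindResult now holds the outcome of this individual bind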
// wrapper struct for struct VkBindPipelineIndirectCommandNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindPipelineIndirectCommandNV.html
struct BindPipelineIndirectCommandNV
{
using NativeType = VkBindPipelineIndirectCommandNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR BindPipelineIndirectCommandNV(VULKAN_HPP_NAMESPACE::DeviceAddress pipelineAddress_ = {}) VULKAN_HPP_NOEXCEPT
: pipelineAddress{ pipelineAddress_ }
{}
VULKAN_HPP_CONSTEXPR BindPipelineIndirectCommandNV( BindPipelineIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BindPipelineIndirectCommandNV( VkBindPipelineIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
: BindPipelineIndirectCommandNV( *reinterpret_cast<BindPipelineIndirectCommandNV const *>( &rhs ) )
{}
BindPipelineIndirectCommandNV & operator=( BindPipelineIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
BindPipelineIndirectCommandNV & operator=( VkBindPipelineIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindPipelineIndirectCommandNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 BindPipelineIndirectCommandNV & setPipelineAddress( VULKAN_HPP_NAMESPACE::DeviceAddress pipelineAddress_ ) VULKAN_HPP_NOEXCEPT
{
pipelineAddress = pipelineAddress_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkBindPipelineIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBindPipelineIndirectCommandNV*>( this );
}
operator VkBindPipelineIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBindPipelineIndirectCommandNV*>( this );
}
operator VkBindPipelineIndirectCommandNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkBindPipelineIndirectCommandNV*>( this );
}
operator VkBindPipelineIndirectCommandNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkBindPipelineIndirectCommandNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::DeviceAddress const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( pipelineAddress );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( BindPipelineIndirectCommandNV const & ) const = default;
#else
bool operator==( BindPipelineIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( pipelineAddress == rhs.pipelineAddress );
#endif
}
bool operator!=( BindPipelineIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::DeviceAddress pipelineAddress = {};
};
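// Illustrative usage sketch (not generated code): another buffer payload rather than
// an API parameter. `pipelineAddress` would come from a query such as
// vkGetPipelineIndirectDeviceAddressNV (an assumption of this sketch), and the token
// is copied into the indirect commands stream at the offset the layout expects.
//
//   vk::BindPipelineIndirectCommandNV token{ pipelineAddress };
//   std::memcpy( mapped, &token, sizeof( token ) );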
// wrapper struct for struct VkBindShaderGroupIndirectCommandNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindShaderGroupIndirectCommandNV.html
struct BindShaderGroupIndirectCommandNV
{
using NativeType = VkBindShaderGroupIndirectCommandNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR BindShaderGroupIndirectCommandNV(uint32_t groupIndex_ = {}) VULKAN_HPP_NOEXCEPT
: groupIndex{ groupIndex_ }
{}
VULKAN_HPP_CONSTEXPR BindShaderGroupIndirectCommandNV( BindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BindShaderGroupIndirectCommandNV( VkBindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
: BindShaderGroupIndirectCommandNV( *reinterpret_cast<BindShaderGroupIndirectCommandNV const *>( &rhs ) )
{}
BindShaderGroupIndirectCommandNV & operator=( BindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
BindShaderGroupIndirectCommandNV & operator=( VkBindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindShaderGroupIndirectCommandNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 BindShaderGroupIndirectCommandNV & setGroupIndex( uint32_t groupIndex_ ) VULKAN_HPP_NOEXCEPT
{
groupIndex = groupIndex_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkBindShaderGroupIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBindShaderGroupIndirectCommandNV*>( this );
}
operator VkBindShaderGroupIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBindShaderGroupIndirectCommandNV*>( this );
}
operator VkBindShaderGroupIndirectCommandNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkBindShaderGroupIndirectCommandNV*>( this );
}
operator VkBindShaderGroupIndirectCommandNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkBindShaderGroupIndirectCommandNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( groupIndex );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( BindShaderGroupIndirectCommandNV const & ) const = default;
#else
bool operator==( BindShaderGroupIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( groupIndex == rhs.groupIndex );
#endif
}
bool operator!=( BindShaderGroupIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
uint32_t groupIndex = {};
};
// wrapper struct for struct VkSparseMemoryBind, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSparseMemoryBind.html
struct SparseMemoryBind
{
using NativeType = VkSparseMemoryBind;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SparseMemoryBind(VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ = {}) VULKAN_HPP_NOEXCEPT
: resourceOffset{ resourceOffset_ }, size{ size_ }, memory{ memory_ }, memoryOffset{ memoryOffset_ }, flags{ flags_ }
{}
VULKAN_HPP_CONSTEXPR SparseMemoryBind( SparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SparseMemoryBind( VkSparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT
: SparseMemoryBind( *reinterpret_cast<SparseMemoryBind const *>( &rhs ) )
{}
SparseMemoryBind & operator=( SparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SparseMemoryBind & operator=( VkSparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseMemoryBind const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind & setResourceOffset( VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset_ ) VULKAN_HPP_NOEXCEPT
{
resourceOffset = resourceOffset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
{
size = size_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
{
memory = memory_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
{
memoryOffset = memoryOffset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind & setFlags( VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkSparseMemoryBind const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSparseMemoryBind*>( this );
}
operator VkSparseMemoryBind &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSparseMemoryBind*>( this );
}
operator VkSparseMemoryBind const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSparseMemoryBind*>( this );
}
operator VkSparseMemoryBind *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSparseMemoryBind*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceMemory const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( resourceOffset, size, memory, memoryOffset, flags );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SparseMemoryBind const & ) const = default;
#else
bool operator==( SparseMemoryBind const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( resourceOffset == rhs.resourceOffset )
&& ( size == rhs.size )
&& ( memory == rhs.memory )
&& ( memoryOffset == rhs.memoryOffset )
&& ( flags == rhs.flags );
#endif
}
bool operator!=( SparseMemoryBind const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset = {};
VULKAN_HPP_NAMESPACE::DeviceSize size = {};
VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {};
VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags = {};
};
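// Illustrative usage sketch (not generated code): a SparseMemoryBind maps a byte
// range of a sparse resource onto a range of a VkDeviceMemory allocation; leaving
// `memory` as VK_NULL_HANDLE unbinds the range instead. `memory` and `blockSize`
// are assumptions, and offsets/sizes generally follow the sparse block alignment.
//
//   vk::SparseMemoryBind bind = vk::SparseMemoryBind{}
//                                 .setResourceOffset( 0 )
//                                 .setSize( blockSize )
//                                 .setMemory( memory )
//                                 .setMemoryOffset( 0 );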
// wrapper struct for struct VkSparseBufferMemoryBindInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSparseBufferMemoryBindInfo.html
struct SparseBufferMemoryBindInfo
{
using NativeType = VkSparseBufferMemoryBindInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SparseBufferMemoryBindInfo(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, uint32_t bindCount_ = {}, const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds_ = {}) VULKAN_HPP_NOEXCEPT
: buffer{ buffer_ }, bindCount{ bindCount_ }, pBinds{ pBinds_ }
{}
VULKAN_HPP_CONSTEXPR SparseBufferMemoryBindInfo( SparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SparseBufferMemoryBindInfo( VkSparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: SparseBufferMemoryBindInfo( *reinterpret_cast<SparseBufferMemoryBindInfo const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
SparseBufferMemoryBindInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseMemoryBind> const & binds_ )
: buffer( buffer_ ), bindCount( static_cast<uint32_t>( binds_.size() ) ), pBinds( binds_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
SparseBufferMemoryBindInfo & operator=( SparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SparseBufferMemoryBindInfo & operator=( VkSparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 SparseBufferMemoryBindInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
{
buffer = buffer_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SparseBufferMemoryBindInfo & setBindCount( uint32_t bindCount_ ) VULKAN_HPP_NOEXCEPT
{
bindCount = bindCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SparseBufferMemoryBindInfo & setPBinds( const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds_ ) VULKAN_HPP_NOEXCEPT
{
pBinds = pBinds_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
SparseBufferMemoryBindInfo & setBinds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseMemoryBind> const & binds_ ) VULKAN_HPP_NOEXCEPT
{
bindCount = static_cast<uint32_t>( binds_.size() );
pBinds = binds_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkSparseBufferMemoryBindInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSparseBufferMemoryBindInfo*>( this );
}
operator VkSparseBufferMemoryBindInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSparseBufferMemoryBindInfo*>( this );
}
operator VkSparseBufferMemoryBindInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSparseBufferMemoryBindInfo*>( this );
}
operator VkSparseBufferMemoryBindInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSparseBufferMemoryBindInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::Buffer const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::SparseMemoryBind * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( buffer, bindCount, pBinds );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SparseBufferMemoryBindInfo const & ) const = default;
#else
bool operator==( SparseBufferMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( buffer == rhs.buffer )
&& ( bindCount == rhs.bindCount )
&& ( pBinds == rhs.pBinds );
#endif
}
bool operator!=( SparseBufferMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::Buffer buffer = {};
uint32_t bindCount = {};
const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds = {};
};
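// Illustrative usage sketch (not generated code): in enhanced mode the ArrayProxy
// constructor fills bindCount / pBinds from a container, so the count cannot drift
// out of sync with the pointer. `sparseBuffer` and `binds`
// (a std::vector<vk::SparseMemoryBind>) are assumptions.
//
//   vk::SparseBufferMemoryBindInfo bufferBindInfo( sparseBuffer, binds );
//   // equivalent to { sparseBuffer, static_cast<uint32_t>( binds.size() ), binds.data() }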
// wrapper struct for struct VkSparseImageOpaqueMemoryBindInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSparseImageOpaqueMemoryBindInfo.html
struct SparseImageOpaqueMemoryBindInfo
{
using NativeType = VkSparseImageOpaqueMemoryBindInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SparseImageOpaqueMemoryBindInfo(VULKAN_HPP_NAMESPACE::Image image_ = {}, uint32_t bindCount_ = {}, const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds_ = {}) VULKAN_HPP_NOEXCEPT
: image{ image_ }, bindCount{ bindCount_ }, pBinds{ pBinds_ }
{}
VULKAN_HPP_CONSTEXPR SparseImageOpaqueMemoryBindInfo( SparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SparseImageOpaqueMemoryBindInfo( VkSparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: SparseImageOpaqueMemoryBindInfo( *reinterpret_cast<SparseImageOpaqueMemoryBindInfo const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
SparseImageOpaqueMemoryBindInfo( VULKAN_HPP_NAMESPACE::Image image_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseMemoryBind> const & binds_ )
: image( image_ ), bindCount( static_cast<uint32_t>( binds_.size() ) ), pBinds( binds_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
SparseImageOpaqueMemoryBindInfo & operator=( SparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SparseImageOpaqueMemoryBindInfo & operator=( VkSparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 SparseImageOpaqueMemoryBindInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
{
image = image_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SparseImageOpaqueMemoryBindInfo & setBindCount( uint32_t bindCount_ ) VULKAN_HPP_NOEXCEPT
{
bindCount = bindCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SparseImageOpaqueMemoryBindInfo & setPBinds( const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds_ ) VULKAN_HPP_NOEXCEPT
{
pBinds = pBinds_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
SparseImageOpaqueMemoryBindInfo & setBinds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseMemoryBind> const & binds_ ) VULKAN_HPP_NOEXCEPT
{
bindCount = static_cast<uint32_t>( binds_.size() );
pBinds = binds_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkSparseImageOpaqueMemoryBindInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSparseImageOpaqueMemoryBindInfo*>( this );
}
operator VkSparseImageOpaqueMemoryBindInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSparseImageOpaqueMemoryBindInfo*>( this );
}
operator VkSparseImageOpaqueMemoryBindInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSparseImageOpaqueMemoryBindInfo*>( this );
}
operator VkSparseImageOpaqueMemoryBindInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSparseImageOpaqueMemoryBindInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::Image const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::SparseMemoryBind * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( image, bindCount, pBinds );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SparseImageOpaqueMemoryBindInfo const & ) const = default;
#else
bool operator==( SparseImageOpaqueMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( image == rhs.image )
&& ( bindCount == rhs.bindCount )
&& ( pBinds == rhs.pBinds );
#endif
}
bool operator!=( SparseImageOpaqueMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::Image image = {};
uint32_t bindCount = {};
const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds = {};
};
// wrapper struct for struct VkImageSubresource, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageSubresource.html
struct ImageSubresource
{
using NativeType = VkImageSubresource;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImageSubresource(VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, uint32_t mipLevel_ = {}, uint32_t arrayLayer_ = {}) VULKAN_HPP_NOEXCEPT
: aspectMask{ aspectMask_ }, mipLevel{ mipLevel_ }, arrayLayer{ arrayLayer_ }
{}
VULKAN_HPP_CONSTEXPR ImageSubresource( ImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageSubresource( VkImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageSubresource( *reinterpret_cast<ImageSubresource const *>( &rhs ) )
{}
ImageSubresource & operator=( ImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ImageSubresource & operator=( VkImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSubresource const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ImageSubresource & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
{
aspectMask = aspectMask_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageSubresource & setMipLevel( uint32_t mipLevel_ ) VULKAN_HPP_NOEXCEPT
{
mipLevel = mipLevel_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageSubresource & setArrayLayer( uint32_t arrayLayer_ ) VULKAN_HPP_NOEXCEPT
{
arrayLayer = arrayLayer_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkImageSubresource const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageSubresource*>( this );
}
operator VkImageSubresource &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageSubresource*>( this );
}
operator VkImageSubresource const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkImageSubresource*>( this );
}
operator VkImageSubresource *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkImageSubresource*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::ImageAspectFlags const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( aspectMask, mipLevel, arrayLayer );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImageSubresource const & ) const = default;
#else
bool operator==( ImageSubresource const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( aspectMask == rhs.aspectMask )
&& ( mipLevel == rhs.mipLevel )
&& ( arrayLayer == rhs.arrayLayer );
#endif
}
bool operator!=( ImageSubresource const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
uint32_t mipLevel = {};
uint32_t arrayLayer = {};
};
// wrapper struct for struct VkOffset3D, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkOffset3D.html
struct Offset3D
{
using NativeType = VkOffset3D;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR Offset3D(int32_t x_ = {}, int32_t y_ = {}, int32_t z_ = {}) VULKAN_HPP_NOEXCEPT
: x{ x_ }, y{ y_ }, z{ z_ }
{}
VULKAN_HPP_CONSTEXPR Offset3D( Offset3D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
Offset3D( VkOffset3D const & rhs ) VULKAN_HPP_NOEXCEPT
: Offset3D( *reinterpret_cast<Offset3D const *>( &rhs ) )
{}
explicit Offset3D( Offset2D const & offset2D, int32_t z_ = {} )
: x( offset2D.x )
, y( offset2D.y )
, z( z_ )
{}
Offset3D & operator=( Offset3D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
Offset3D & operator=( VkOffset3D const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Offset3D const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 Offset3D & setX( int32_t x_ ) VULKAN_HPP_NOEXCEPT
{
x = x_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 Offset3D & setY( int32_t y_ ) VULKAN_HPP_NOEXCEPT
{
y = y_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 Offset3D & setZ( int32_t z_ ) VULKAN_HPP_NOEXCEPT
{
z = z_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkOffset3D const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkOffset3D*>( this );
}
operator VkOffset3D &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkOffset3D*>( this );
}
operator VkOffset3D const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkOffset3D*>( this );
}
operator VkOffset3D *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkOffset3D*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<int32_t const &, int32_t const &, int32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( x, y, z );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( Offset3D const & ) const = default;
#else
bool operator==( Offset3D const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( x == rhs.x )
&& ( y == rhs.y )
&& ( z == rhs.z );
#endif
}
bool operator!=( Offset3D const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
int32_t x = {};
int32_t y = {};
int32_t z = {};
};
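// Illustrative usage sketch (not generated code): besides the member-wise
// constructor, Offset3D (like Extent3D below) can promote its 2D counterpart, which
// is handy when widening a 2D region into 3D copy parameters.
//
//   vk::Offset2D origin2D{ 16, 32 };
//   vk::Offset3D origin3D( origin2D, /*z_=*/0 );  // explicit promoting constructor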
// wrapper struct for struct VkExtent3D, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExtent3D.html
struct Extent3D
{
using NativeType = VkExtent3D;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR Extent3D(uint32_t width_ = {}, uint32_t height_ = {}, uint32_t depth_ = {}) VULKAN_HPP_NOEXCEPT
: width{ width_ }, height{ height_ }, depth{ depth_ }
{}
VULKAN_HPP_CONSTEXPR Extent3D( Extent3D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
Extent3D( VkExtent3D const & rhs ) VULKAN_HPP_NOEXCEPT
: Extent3D( *reinterpret_cast<Extent3D const *>( &rhs ) )
{}
explicit Extent3D( Extent2D const & extent2D, uint32_t depth_ = {} )
: width( extent2D.width )
, height( extent2D.height )
, depth( depth_ )
{}
Extent3D & operator=( Extent3D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
Extent3D & operator=( VkExtent3D const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Extent3D const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 Extent3D & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
{
width = width_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 Extent3D & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
{
height = height_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 Extent3D & setDepth( uint32_t depth_ ) VULKAN_HPP_NOEXCEPT
{
depth = depth_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkExtent3D const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkExtent3D*>( this );
}
operator VkExtent3D &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkExtent3D*>( this );
}
operator VkExtent3D const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkExtent3D*>( this );
}
operator VkExtent3D *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkExtent3D*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<uint32_t const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( width, height, depth );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( Extent3D const & ) const = default;
#else
bool operator==( Extent3D const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( width == rhs.width )
&& ( height == rhs.height )
&& ( depth == rhs.depth );
#endif
}
bool operator!=( Extent3D const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
uint32_t width = {};
uint32_t height = {};
uint32_t depth = {};
};
// wrapper struct for struct VkSparseImageMemoryBind, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSparseImageMemoryBind.html
struct SparseImageMemoryBind
{
using NativeType = VkSparseImageMemoryBind;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SparseImageMemoryBind(VULKAN_HPP_NAMESPACE::ImageSubresource subresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D offset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}, VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ = {}) VULKAN_HPP_NOEXCEPT
: subresource{ subresource_ }, offset{ offset_ }, extent{ extent_ }, memory{ memory_ }, memoryOffset{ memoryOffset_ }, flags{ flags_ }
{}
VULKAN_HPP_CONSTEXPR SparseImageMemoryBind( SparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SparseImageMemoryBind( VkSparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT
: SparseImageMemoryBind( *reinterpret_cast<SparseImageMemoryBind const *>( &rhs ) )
{}
SparseImageMemoryBind & operator=( SparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SparseImageMemoryBind & operator=( VkSparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageMemoryBind const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind & setSubresource( VULKAN_HPP_NAMESPACE::ImageSubresource const & subresource_ ) VULKAN_HPP_NOEXCEPT
{
subresource = subresource_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind & setOffset( VULKAN_HPP_NAMESPACE::Offset3D const & offset_ ) VULKAN_HPP_NOEXCEPT
{
offset = offset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
{
extent = extent_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
{
memory = memory_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
{
memoryOffset = memoryOffset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind & setFlags( VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkSparseImageMemoryBind const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSparseImageMemoryBind*>( this );
}
operator VkSparseImageMemoryBind &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSparseImageMemoryBind*>( this );
}
operator VkSparseImageMemoryBind const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSparseImageMemoryBind*>( this );
}
operator VkSparseImageMemoryBind *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSparseImageMemoryBind*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::ImageSubresource const &, VULKAN_HPP_NAMESPACE::Offset3D const &, VULKAN_HPP_NAMESPACE::Extent3D const &, VULKAN_HPP_NAMESPACE::DeviceMemory const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( subresource, offset, extent, memory, memoryOffset, flags );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SparseImageMemoryBind const & ) const = default;
#else
bool operator==( SparseImageMemoryBind const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( subresource == rhs.subresource )
&& ( offset == rhs.offset )
&& ( extent == rhs.extent )
&& ( memory == rhs.memory )
&& ( memoryOffset == rhs.memoryOffset )
&& ( flags == rhs.flags );
#endif
}
bool operator!=( SparseImageMemoryBind const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::ImageSubresource subresource = {};
VULKAN_HPP_NAMESPACE::Offset3D offset = {};
VULKAN_HPP_NAMESPACE::Extent3D extent = {};
VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {};
VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags = {};
};
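// Illustrative usage sketch (not generated code): bind one sparse block region at
// mip level 0 of the color aspect. `memory` is an assumption; offset and extent are
// in texels and should respect the image's sparse block granularity.
//
//   vk::SparseImageMemoryBind imageBind = vk::SparseImageMemoryBind{}
//     .setSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0 } )
//     .setOffset( { 0, 0, 0 } )
//     .setExtent( { 128, 128, 1 } )
//     .setMemory( memory )
//     .setMemoryOffset( 0 );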
// wrapper struct for struct VkSparseImageMemoryBindInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSparseImageMemoryBindInfo.html
struct SparseImageMemoryBindInfo
{
using NativeType = VkSparseImageMemoryBindInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SparseImageMemoryBindInfo(VULKAN_HPP_NAMESPACE::Image image_ = {}, uint32_t bindCount_ = {}, const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind * pBinds_ = {}) VULKAN_HPP_NOEXCEPT
: image{ image_ }, bindCount{ bindCount_ }, pBinds{ pBinds_ }
{}
VULKAN_HPP_CONSTEXPR SparseImageMemoryBindInfo( SparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SparseImageMemoryBindInfo( VkSparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: SparseImageMemoryBindInfo( *reinterpret_cast<SparseImageMemoryBindInfo const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
SparseImageMemoryBindInfo( VULKAN_HPP_NAMESPACE::Image image_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind> const & binds_ )
: image( image_ ), bindCount( static_cast<uint32_t>( binds_.size() ) ), pBinds( binds_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
SparseImageMemoryBindInfo & operator=( SparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SparseImageMemoryBindInfo & operator=( VkSparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBindInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
{
image = image_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBindInfo & setBindCount( uint32_t bindCount_ ) VULKAN_HPP_NOEXCEPT
{
bindCount = bindCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBindInfo & setPBinds( const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind * pBinds_ ) VULKAN_HPP_NOEXCEPT
{
pBinds = pBinds_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
SparseImageMemoryBindInfo & setBinds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind> const & binds_ ) VULKAN_HPP_NOEXCEPT
{
bindCount = static_cast<uint32_t>( binds_.size() );
pBinds = binds_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkSparseImageMemoryBindInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSparseImageMemoryBindInfo*>( this );
}
operator VkSparseImageMemoryBindInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSparseImageMemoryBindInfo*>( this );
}
operator VkSparseImageMemoryBindInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSparseImageMemoryBindInfo*>( this );
}
operator VkSparseImageMemoryBindInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSparseImageMemoryBindInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::Image const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( image, bindCount, pBinds );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SparseImageMemoryBindInfo const & ) const = default;
#else
bool operator==( SparseImageMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( image == rhs.image )
&& ( bindCount == rhs.bindCount )
&& ( pBinds == rhs.pBinds );
#endif
}
bool operator!=( SparseImageMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::Image image = {};
uint32_t bindCount = {};
const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind * pBinds = {};
};
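// Illustrative usage sketch (not generated code): SparseImageMemoryBindInfo groups
// the per-image binds; one or more of these feed the imageBinds array of the
// BindSparseInfo below, which is then submitted with vk::Queue::bindSparse.
// `sparseImage` and `imageBinds` (a std::vector<vk::SparseImageMemoryBind>) are
// assumptions.
//
//   vk::SparseImageMemoryBindInfo imageBindInfo( sparseImage, imageBinds );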
// wrapper struct for struct VkBindSparseInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindSparseInfo.html
struct BindSparseInfo
{
using NativeType = VkBindSparseInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindSparseInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR BindSparseInfo(uint32_t waitSemaphoreCount_ = {}, const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ = {}, uint32_t bufferBindCount_ = {}, const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo * pBufferBinds_ = {}, uint32_t imageOpaqueBindCount_ = {}, const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo * pImageOpaqueBinds_ = {}, uint32_t imageBindCount_ = {}, const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo * pImageBinds_ = {}, uint32_t signalSemaphoreCount_ = {}, const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, waitSemaphoreCount{ waitSemaphoreCount_ }, pWaitSemaphores{ pWaitSemaphores_ }, bufferBindCount{ bufferBindCount_ }, pBufferBinds{ pBufferBinds_ }, imageOpaqueBindCount{ imageOpaqueBindCount_ }, pImageOpaqueBinds{ pImageOpaqueBinds_ }, imageBindCount{ imageBindCount_ }, pImageBinds{ pImageBinds_ }, signalSemaphoreCount{ signalSemaphoreCount_ }, pSignalSemaphores{ pSignalSemaphores_ }
{}
VULKAN_HPP_CONSTEXPR BindSparseInfo( BindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BindSparseInfo( VkBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: BindSparseInfo( *reinterpret_cast<BindSparseInfo const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
BindSparseInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & waitSemaphores_,
                VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo> const & bufferBinds_ = {},
                VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo> const & imageOpaqueBinds_ = {},
                VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo> const & imageBinds_ = {},
                VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & signalSemaphores_ = {},
                const void * pNext_ = nullptr )
  : pNext( pNext_ )
  , waitSemaphoreCount( static_cast<uint32_t>( waitSemaphores_.size() ) )
  , pWaitSemaphores( waitSemaphores_.data() )
  , bufferBindCount( static_cast<uint32_t>( bufferBinds_.size() ) )
  , pBufferBinds( bufferBinds_.data() )
  , imageOpaqueBindCount( static_cast<uint32_t>( imageOpaqueBinds_.size() ) )
  , pImageOpaqueBinds( imageOpaqueBinds_.data() )
  , imageBindCount( static_cast<uint32_t>( imageBinds_.size() ) )
  , pImageBinds( imageBinds_.data() )
  , signalSemaphoreCount( static_cast<uint32_t>( signalSemaphores_.size() ) )
  , pSignalSemaphores( signalSemaphores_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
BindSparseInfo & operator=( BindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
BindSparseInfo & operator=( VkBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindSparseInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
{
waitSemaphoreCount = waitSemaphoreCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT
{
pWaitSemaphores = pWaitSemaphores_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
BindSparseInfo & setWaitSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & waitSemaphores_ ) VULKAN_HPP_NOEXCEPT
{
waitSemaphoreCount = static_cast<uint32_t>( waitSemaphores_.size() );
pWaitSemaphores = waitSemaphores_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setBufferBindCount( uint32_t bufferBindCount_ ) VULKAN_HPP_NOEXCEPT
{
bufferBindCount = bufferBindCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setPBufferBinds( const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo * pBufferBinds_ ) VULKAN_HPP_NOEXCEPT
{
pBufferBinds = pBufferBinds_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
BindSparseInfo & setBufferBinds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo> const & bufferBinds_ ) VULKAN_HPP_NOEXCEPT
{
bufferBindCount = static_cast<uint32_t>( bufferBinds_.size() );
pBufferBinds = bufferBinds_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setImageOpaqueBindCount( uint32_t imageOpaqueBindCount_ ) VULKAN_HPP_NOEXCEPT
{
imageOpaqueBindCount = imageOpaqueBindCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setPImageOpaqueBinds( const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo * pImageOpaqueBinds_ ) VULKAN_HPP_NOEXCEPT
{
pImageOpaqueBinds = pImageOpaqueBinds_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
BindSparseInfo & setImageOpaqueBinds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo> const & imageOpaqueBinds_ ) VULKAN_HPP_NOEXCEPT
{
imageOpaqueBindCount = static_cast<uint32_t>( imageOpaqueBinds_.size() );
pImageOpaqueBinds = imageOpaqueBinds_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setImageBindCount( uint32_t imageBindCount_ ) VULKAN_HPP_NOEXCEPT
{
imageBindCount = imageBindCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setPImageBinds( const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo * pImageBinds_ ) VULKAN_HPP_NOEXCEPT
{
pImageBinds = pImageBinds_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
BindSparseInfo & setImageBinds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo> const & imageBinds_ ) VULKAN_HPP_NOEXCEPT
{
imageBindCount = static_cast<uint32_t>( imageBinds_.size() );
pImageBinds = imageBinds_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
{
signalSemaphoreCount = signalSemaphoreCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setPSignalSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores_ ) VULKAN_HPP_NOEXCEPT
{
pSignalSemaphores = pSignalSemaphores_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
BindSparseInfo & setSignalSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & signalSemaphores_ ) VULKAN_HPP_NOEXCEPT
{
signalSemaphoreCount = static_cast<uint32_t>( signalSemaphores_.size() );
pSignalSemaphores = signalSemaphores_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkBindSparseInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBindSparseInfo*>( this );
}
operator VkBindSparseInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBindSparseInfo*>( this );
}
operator VkBindSparseInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkBindSparseInfo*>( this );
}
operator VkBindSparseInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkBindSparseInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Semaphore * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Semaphore * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, waitSemaphoreCount, pWaitSemaphores, bufferBindCount, pBufferBinds, imageOpaqueBindCount, pImageOpaqueBinds, imageBindCount, pImageBinds, signalSemaphoreCount, pSignalSemaphores );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( BindSparseInfo const & ) const = default;
#else
bool operator==( BindSparseInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( waitSemaphoreCount == rhs.waitSemaphoreCount )
&& ( pWaitSemaphores == rhs.pWaitSemaphores )
&& ( bufferBindCount == rhs.bufferBindCount )
&& ( pBufferBinds == rhs.pBufferBinds )
&& ( imageOpaqueBindCount == rhs.imageOpaqueBindCount )
&& ( pImageOpaqueBinds == rhs.pImageOpaqueBinds )
&& ( imageBindCount == rhs.imageBindCount )
&& ( pImageBinds == rhs.pImageBinds )
&& ( signalSemaphoreCount == rhs.signalSemaphoreCount )
&& ( pSignalSemaphores == rhs.pSignalSemaphores );
#endif
}
bool operator!=( BindSparseInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindSparseInfo;
const void * pNext = {};
uint32_t waitSemaphoreCount = {};
const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores = {};
uint32_t bufferBindCount = {};
const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo * pBufferBinds = {};
uint32_t imageOpaqueBindCount = {};
const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo * pImageOpaqueBinds = {};
uint32_t imageBindCount = {};
const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo * pImageBinds = {};
uint32_t signalSemaphoreCount = {};
const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores = {};
};
template <>
struct CppType<StructureType, StructureType::eBindSparseInfo>
{
using Type = BindSparseInfo;
};
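// Usage sketch (comment only): in enhanced mode, the ArrayProxyNoTemporaries
// constructor fills each count/pointer pair from a container, and the struct
// passes straight to Queue::bindSparse. `sparseQueue`, `bufferBindInfo`,
// `signalSemaphores`, and `fence` are hypothetical.
//   std::array<VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo, 1> bufferBinds = { bufferBindInfo };
//   VULKAN_HPP_NAMESPACE::BindSparseInfo bindSparseInfo( {}, bufferBinds, {}, {}, signalSemaphores );
//   sparseQueue.bindSparse( bindSparseInfo, fence );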
// wrapper struct for struct VkBindTensorMemoryInfoARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindTensorMemoryInfoARM.html
struct BindTensorMemoryInfoARM
{
using NativeType = VkBindTensorMemoryInfoARM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindTensorMemoryInfoARM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR BindTensorMemoryInfoARM(VULKAN_HPP_NAMESPACE::TensorARM tensor_ = {}, VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, tensor{ tensor_ }, memory{ memory_ }, memoryOffset{ memoryOffset_ }
{}
VULKAN_HPP_CONSTEXPR BindTensorMemoryInfoARM( BindTensorMemoryInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BindTensorMemoryInfoARM( VkBindTensorMemoryInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
: BindTensorMemoryInfoARM( *reinterpret_cast<BindTensorMemoryInfoARM const *>( &rhs ) )
{}
BindTensorMemoryInfoARM & operator=( BindTensorMemoryInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
BindTensorMemoryInfoARM & operator=( VkBindTensorMemoryInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindTensorMemoryInfoARM const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 BindTensorMemoryInfoARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BindTensorMemoryInfoARM & setTensor( VULKAN_HPP_NAMESPACE::TensorARM tensor_ ) VULKAN_HPP_NOEXCEPT
{
tensor = tensor_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BindTensorMemoryInfoARM & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
{
memory = memory_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BindTensorMemoryInfoARM & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
{
memoryOffset = memoryOffset_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkBindTensorMemoryInfoARM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBindTensorMemoryInfoARM*>( this );
}
operator VkBindTensorMemoryInfoARM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBindTensorMemoryInfoARM*>( this );
}
operator VkBindTensorMemoryInfoARM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkBindTensorMemoryInfoARM*>( this );
}
operator VkBindTensorMemoryInfoARM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkBindTensorMemoryInfoARM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::TensorARM const &, VULKAN_HPP_NAMESPACE::DeviceMemory const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, tensor, memory, memoryOffset );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( BindTensorMemoryInfoARM const & ) const = default;
#else
bool operator==( BindTensorMemoryInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( tensor == rhs.tensor )
&& ( memory == rhs.memory )
&& ( memoryOffset == rhs.memoryOffset );
#endif
}
bool operator!=( BindTensorMemoryInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindTensorMemoryInfoARM;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::TensorARM tensor = {};
VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {};
};
template <>
struct CppType<StructureType, StructureType::eBindTensorMemoryInfoARM>
{
using Type = BindTensorMemoryInfoARM;
};
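// Usage sketch (comment only), assuming the Device::bindTensorMemoryARM entry
// point from VK_ARM_tensors; `device`, `tensor`, and `memory` are hypothetical.
//   VULKAN_HPP_NAMESPACE::BindTensorMemoryInfoARM bindInfo( tensor, memory, 0 /* memoryOffset */ );
//   device.bindTensorMemoryARM( bindInfo );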
// wrapper struct for struct VkBindVertexBufferIndirectCommandEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindVertexBufferIndirectCommandEXT.html
struct BindVertexBufferIndirectCommandEXT
{
using NativeType = VkBindVertexBufferIndirectCommandEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR BindVertexBufferIndirectCommandEXT(VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ = {}, uint32_t size_ = {}, uint32_t stride_ = {}) VULKAN_HPP_NOEXCEPT
: bufferAddress{ bufferAddress_ }, size{ size_ }, stride{ stride_ }
{}
VULKAN_HPP_CONSTEXPR BindVertexBufferIndirectCommandEXT( BindVertexBufferIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BindVertexBufferIndirectCommandEXT( VkBindVertexBufferIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: BindVertexBufferIndirectCommandEXT( *reinterpret_cast<BindVertexBufferIndirectCommandEXT const *>( &rhs ) )
{}
BindVertexBufferIndirectCommandEXT & operator=( BindVertexBufferIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
BindVertexBufferIndirectCommandEXT & operator=( VkBindVertexBufferIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindVertexBufferIndirectCommandEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 BindVertexBufferIndirectCommandEXT & setBufferAddress( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ ) VULKAN_HPP_NOEXCEPT
{
bufferAddress = bufferAddress_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BindVertexBufferIndirectCommandEXT & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT
{
size = size_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BindVertexBufferIndirectCommandEXT & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT
{
stride = stride_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkBindVertexBufferIndirectCommandEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBindVertexBufferIndirectCommandEXT*>( this );
}
operator VkBindVertexBufferIndirectCommandEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBindVertexBufferIndirectCommandEXT*>( this );
}
operator VkBindVertexBufferIndirectCommandEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkBindVertexBufferIndirectCommandEXT*>( this );
}
operator VkBindVertexBufferIndirectCommandEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkBindVertexBufferIndirectCommandEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::DeviceAddress const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( bufferAddress, size, stride );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( BindVertexBufferIndirectCommandEXT const & ) const = default;
#else
bool operator==( BindVertexBufferIndirectCommandEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( bufferAddress == rhs.bufferAddress )
&& ( size == rhs.size )
&& ( stride == rhs.stride );
#endif
}
bool operator!=( BindVertexBufferIndirectCommandEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress = {};
uint32_t size = {};
uint32_t stride = {};
};
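// Note: unlike the *Info structs above, this is a plain device-consumable record
// (no sType/pNext) laid out for an indirect command stream. A host-side fill
// might look like the sketch below, where `vertexBufferAddress`, `vertexCount`,
// and the `Vertex` type are hypothetical.
//   VULKAN_HPP_NAMESPACE::BindVertexBufferIndirectCommandEXT cmd;
//   cmd.setBufferAddress( vertexBufferAddress )
//      .setSize( static_cast<uint32_t>( vertexCount * sizeof( Vertex ) ) )
//      .setStride( sizeof( Vertex ) );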
// wrapper struct for struct VkBindVertexBufferIndirectCommandNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindVertexBufferIndirectCommandNV.html
struct BindVertexBufferIndirectCommandNV
{
using NativeType = VkBindVertexBufferIndirectCommandNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR BindVertexBufferIndirectCommandNV(VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ = {}, uint32_t size_ = {}, uint32_t stride_ = {}) VULKAN_HPP_NOEXCEPT
: bufferAddress{ bufferAddress_ }, size{ size_ }, stride{ stride_ }
{}
VULKAN_HPP_CONSTEXPR BindVertexBufferIndirectCommandNV( BindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BindVertexBufferIndirectCommandNV( VkBindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
: BindVertexBufferIndirectCommandNV( *reinterpret_cast<BindVertexBufferIndirectCommandNV const *>( &rhs ) )
{}
BindVertexBufferIndirectCommandNV & operator=( BindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
BindVertexBufferIndirectCommandNV & operator=( VkBindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindVertexBufferIndirectCommandNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 BindVertexBufferIndirectCommandNV & setBufferAddress( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ ) VULKAN_HPP_NOEXCEPT
{
bufferAddress = bufferAddress_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BindVertexBufferIndirectCommandNV & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT
{
size = size_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BindVertexBufferIndirectCommandNV & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT
{
stride = stride_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkBindVertexBufferIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBindVertexBufferIndirectCommandNV*>( this );
}
operator VkBindVertexBufferIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBindVertexBufferIndirectCommandNV*>( this );
}
operator VkBindVertexBufferIndirectCommandNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkBindVertexBufferIndirectCommandNV*>( this );
}
operator VkBindVertexBufferIndirectCommandNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkBindVertexBufferIndirectCommandNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::DeviceAddress const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( bufferAddress, size, stride );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( BindVertexBufferIndirectCommandNV const & ) const = default;
#else
bool operator==( BindVertexBufferIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( bufferAddress == rhs.bufferAddress )
&& ( size == rhs.size )
&& ( stride == rhs.stride );
#endif
}
bool operator!=( BindVertexBufferIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress = {};
uint32_t size = {};
uint32_t stride = {};
};
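// Layout-identical to BindVertexBufferIndirectCommandEXT above; this is the
// earlier VK_NV_device_generated_commands form of the same record.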
// wrapper struct for struct VkBindVideoSessionMemoryInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindVideoSessionMemoryInfoKHR.html
struct BindVideoSessionMemoryInfoKHR
{
using NativeType = VkBindVideoSessionMemoryInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindVideoSessionMemoryInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR BindVideoSessionMemoryInfoKHR(uint32_t memoryBindIndex_ = {}, VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memorySize_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, memoryBindIndex{ memoryBindIndex_ }, memory{ memory_ }, memoryOffset{ memoryOffset_ }, memorySize{ memorySize_ }
{}
VULKAN_HPP_CONSTEXPR BindVideoSessionMemoryInfoKHR( BindVideoSessionMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BindVideoSessionMemoryInfoKHR( VkBindVideoSessionMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: BindVideoSessionMemoryInfoKHR( *reinterpret_cast<BindVideoSessionMemoryInfoKHR const *>( &rhs ) )
{}
BindVideoSessionMemoryInfoKHR & operator=( BindVideoSessionMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
BindVideoSessionMemoryInfoKHR & operator=( VkBindVideoSessionMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 BindVideoSessionMemoryInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BindVideoSessionMemoryInfoKHR & setMemoryBindIndex( uint32_t memoryBindIndex_ ) VULKAN_HPP_NOEXCEPT
{
memoryBindIndex = memoryBindIndex_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BindVideoSessionMemoryInfoKHR & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
{
memory = memory_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BindVideoSessionMemoryInfoKHR & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
{
memoryOffset = memoryOffset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BindVideoSessionMemoryInfoKHR & setMemorySize( VULKAN_HPP_NAMESPACE::DeviceSize memorySize_ ) VULKAN_HPP_NOEXCEPT
{
memorySize = memorySize_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkBindVideoSessionMemoryInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBindVideoSessionMemoryInfoKHR*>( this );
}
operator VkBindVideoSessionMemoryInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBindVideoSessionMemoryInfoKHR*>( this );
}
operator VkBindVideoSessionMemoryInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkBindVideoSessionMemoryInfoKHR*>( this );
}
operator VkBindVideoSessionMemoryInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkBindVideoSessionMemoryInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DeviceMemory const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, memoryBindIndex, memory, memoryOffset, memorySize );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( BindVideoSessionMemoryInfoKHR const & ) const = default;
#else
bool operator==( BindVideoSessionMemoryInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( memoryBindIndex == rhs.memoryBindIndex )
&& ( memory == rhs.memory )
&& ( memoryOffset == rhs.memoryOffset )
&& ( memorySize == rhs.memorySize );
#endif
}
bool operator!=( BindVideoSessionMemoryInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindVideoSessionMemoryInfoKHR;
const void * pNext = {};
uint32_t memoryBindIndex = {};
VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {};
VULKAN_HPP_NAMESPACE::DeviceSize memorySize = {};
};
template <>
struct CppType<StructureType, StructureType::eBindVideoSessionMemoryInfoKHR>
{
using Type = BindVideoSessionMemoryInfoKHR;
};
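// Usage sketch (comment only): each bind targets one memoryBindIndex reported by
// the video session's memory requirements. `device`, `videoSession`, `memory`,
// and `size` are hypothetical; Device::bindVideoSessionMemoryKHR is assumed from
// VK_KHR_video_queue.
//   VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR bindInfo( 0 /* memoryBindIndex */, memory, 0 /* memoryOffset */, size );
//   device.bindVideoSessionMemoryKHR( videoSession, bindInfo );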
// wrapper struct for struct VkBlitImageCubicWeightsInfoQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBlitImageCubicWeightsInfoQCOM.html
struct BlitImageCubicWeightsInfoQCOM
{
using NativeType = VkBlitImageCubicWeightsInfoQCOM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBlitImageCubicWeightsInfoQCOM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR BlitImageCubicWeightsInfoQCOM(VULKAN_HPP_NAMESPACE::CubicFilterWeightsQCOM cubicWeights_ = VULKAN_HPP_NAMESPACE::CubicFilterWeightsQCOM::eCatmullRom, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, cubicWeights{ cubicWeights_ }
{}
VULKAN_HPP_CONSTEXPR BlitImageCubicWeightsInfoQCOM( BlitImageCubicWeightsInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BlitImageCubicWeightsInfoQCOM( VkBlitImageCubicWeightsInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
: BlitImageCubicWeightsInfoQCOM( *reinterpret_cast<BlitImageCubicWeightsInfoQCOM const *>( &rhs ) )
{}
BlitImageCubicWeightsInfoQCOM & operator=( BlitImageCubicWeightsInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
BlitImageCubicWeightsInfoQCOM & operator=( VkBlitImageCubicWeightsInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BlitImageCubicWeightsInfoQCOM const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 BlitImageCubicWeightsInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BlitImageCubicWeightsInfoQCOM & setCubicWeights( VULKAN_HPP_NAMESPACE::CubicFilterWeightsQCOM cubicWeights_ ) VULKAN_HPP_NOEXCEPT
{
cubicWeights = cubicWeights_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkBlitImageCubicWeightsInfoQCOM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBlitImageCubicWeightsInfoQCOM*>( this );
}
operator VkBlitImageCubicWeightsInfoQCOM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBlitImageCubicWeightsInfoQCOM*>( this );
}
operator VkBlitImageCubicWeightsInfoQCOM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkBlitImageCubicWeightsInfoQCOM*>( this );
}
operator VkBlitImageCubicWeightsInfoQCOM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkBlitImageCubicWeightsInfoQCOM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::CubicFilterWeightsQCOM const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, cubicWeights );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( BlitImageCubicWeightsInfoQCOM const & ) const = default;
#else
bool operator==( BlitImageCubicWeightsInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( cubicWeights == rhs.cubicWeights );
#endif
}
bool operator!=( BlitImageCubicWeightsInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBlitImageCubicWeightsInfoQCOM;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::CubicFilterWeightsQCOM cubicWeights = VULKAN_HPP_NAMESPACE::CubicFilterWeightsQCOM::eCatmullRom;
};
template <>
struct CppType<StructureType, StructureType::eBlitImageCubicWeightsInfoQCOM>
{
using Type = BlitImageCubicWeightsInfoQCOM;
};
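// This struct extends BlitImageInfo2 through its pNext chain
// (VK_QCOM_filter_cubic_weights). Minimal sketch, with `blitInfo` a
// BlitImageInfo2 as defined further below:
//   VULKAN_HPP_NAMESPACE::BlitImageCubicWeightsInfoQCOM cubicWeights( VULKAN_HPP_NAMESPACE::CubicFilterWeightsQCOM::eMitchellNetravali );
//   blitInfo.setPNext( &cubicWeights );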
// wrapper struct for struct VkImageSubresourceLayers, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageSubresourceLayers.html
struct ImageSubresourceLayers
{
using NativeType = VkImageSubresourceLayers;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImageSubresourceLayers(VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, uint32_t mipLevel_ = {}, uint32_t baseArrayLayer_ = {}, uint32_t layerCount_ = {}) VULKAN_HPP_NOEXCEPT
: aspectMask{ aspectMask_ }, mipLevel{ mipLevel_ }, baseArrayLayer{ baseArrayLayer_ }, layerCount{ layerCount_ }
{}
VULKAN_HPP_CONSTEXPR ImageSubresourceLayers( ImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageSubresourceLayers( VkImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageSubresourceLayers( *reinterpret_cast<ImageSubresourceLayers const *>( &rhs ) )
{}
ImageSubresourceLayers & operator=( ImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ImageSubresourceLayers & operator=( VkImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ImageSubresourceLayers & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
{
aspectMask = aspectMask_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageSubresourceLayers & setMipLevel( uint32_t mipLevel_ ) VULKAN_HPP_NOEXCEPT
{
mipLevel = mipLevel_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageSubresourceLayers & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT
{
baseArrayLayer = baseArrayLayer_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageSubresourceLayers & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
{
layerCount = layerCount_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkImageSubresourceLayers const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageSubresourceLayers*>( this );
}
operator VkImageSubresourceLayers &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageSubresourceLayers*>( this );
}
operator VkImageSubresourceLayers const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkImageSubresourceLayers*>( this );
}
operator VkImageSubresourceLayers *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkImageSubresourceLayers*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::ImageAspectFlags const &, uint32_t const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( aspectMask, mipLevel, baseArrayLayer, layerCount );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImageSubresourceLayers const & ) const = default;
#else
bool operator==( ImageSubresourceLayers const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( aspectMask == rhs.aspectMask )
&& ( mipLevel == rhs.mipLevel )
&& ( baseArrayLayer == rhs.baseArrayLayer )
&& ( layerCount == rhs.layerCount );
#endif
}
bool operator!=( ImageSubresourceLayers const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
uint32_t mipLevel = {};
uint32_t baseArrayLayer = {};
uint32_t layerCount = {};
};
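// Illustrative one-liner: the color aspect of mip level 0, array layer 0 only.
//   VULKAN_HPP_NAMESPACE::ImageSubresourceLayers layers( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor, 0, 0, 1 );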
// wrapper struct for struct VkImageBlit2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageBlit2.html
struct ImageBlit2
{
using NativeType = VkImageBlit2;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageBlit2;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 ImageBlit2(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const & srcOffsets_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const & dstOffsets_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, srcSubresource{ srcSubresource_ }, srcOffsets{ srcOffsets_ }, dstSubresource{ dstSubresource_ }, dstOffsets{ dstOffsets_ }
{}
VULKAN_HPP_CONSTEXPR_14 ImageBlit2( ImageBlit2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageBlit2( VkImageBlit2 const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageBlit2( *reinterpret_cast<ImageBlit2 const *>( &rhs ) )
{}
ImageBlit2 & operator=( ImageBlit2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ImageBlit2 & operator=( VkImageBlit2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageBlit2 const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ImageBlit2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageBlit2 & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
{
srcSubresource = srcSubresource_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageBlit2 & setSrcOffsets( std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const & srcOffsets_ ) VULKAN_HPP_NOEXCEPT
{
srcOffsets = srcOffsets_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageBlit2 & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
{
dstSubresource = dstSubresource_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageBlit2 & setDstOffsets( std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const & dstOffsets_ ) VULKAN_HPP_NOEXCEPT
{
dstOffsets = dstOffsets_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkImageBlit2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageBlit2*>( this );
}
operator VkImageBlit2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageBlit2*>( this );
}
operator VkImageBlit2 const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkImageBlit2*>( this );
}
operator VkImageBlit2 *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkImageBlit2*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::Offset3D, 2> const &, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::Offset3D, 2> const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, srcSubresource, srcOffsets, dstSubresource, dstOffsets );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImageBlit2 const & ) const = default;
#else
bool operator==( ImageBlit2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( srcSubresource == rhs.srcSubresource )
&& ( srcOffsets == rhs.srcOffsets )
&& ( dstSubresource == rhs.dstSubresource )
&& ( dstOffsets == rhs.dstOffsets );
#endif
}
bool operator!=( ImageBlit2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageBlit2;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::Offset3D, 2> srcOffsets = {};
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::Offset3D, 2> dstOffsets = {};
};
template <>
struct CppType<StructureType, StructureType::eImageBlit2>
{
using Type = ImageBlit2;
};
using ImageBlit2KHR = ImageBlit2;
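// Sketch of a blit region downscaling a 256x256 source mip to 128x128 (comment
// only; all values illustrative):
//   VULKAN_HPP_NAMESPACE::ImageBlit2 region(
//       { VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor, 0, 0, 1 },  // srcSubresource
//       { { { 0, 0, 0 }, { 256, 256, 1 } } },                            // srcOffsets
//       { VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor, 0, 0, 1 },  // dstSubresource
//       { { { 0, 0, 0 }, { 128, 128, 1 } } } );                          // dstOffsets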
// wrapper struct for struct VkBlitImageInfo2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBlitImageInfo2.html
struct BlitImageInfo2
{
using NativeType = VkBlitImageInfo2;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBlitImageInfo2;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2( VULKAN_HPP_NAMESPACE::Image srcImage_ = {},
                                        VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
                                        VULKAN_HPP_NAMESPACE::Image dstImage_ = {},
                                        VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
                                        uint32_t regionCount_ = {},
                                        const VULKAN_HPP_NAMESPACE::ImageBlit2 * pRegions_ = {},
                                        VULKAN_HPP_NAMESPACE::Filter filter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest,
                                        const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
  : pNext{ pNext_ }
  , srcImage{ srcImage_ }
  , srcImageLayout{ srcImageLayout_ }
  , dstImage{ dstImage_ }
  , dstImageLayout{ dstImageLayout_ }
  , regionCount{ regionCount_ }
  , pRegions{ pRegions_ }
  , filter{ filter_ }
{}
VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2( BlitImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BlitImageInfo2( VkBlitImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
: BlitImageInfo2( *reinterpret_cast<BlitImageInfo2 const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
BlitImageInfo2( VULKAN_HPP_NAMESPACE::Image srcImage_,
                VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_,
                VULKAN_HPP_NAMESPACE::Image dstImage_,
                VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_,
                VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageBlit2> const & regions_,
                VULKAN_HPP_NAMESPACE::Filter filter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest,
                const void * pNext_ = nullptr )
  : pNext( pNext_ )
  , srcImage( srcImage_ )
  , srcImageLayout( srcImageLayout_ )
  , dstImage( dstImage_ )
  , dstImageLayout( dstImageLayout_ )
  , regionCount( static_cast<uint32_t>( regions_.size() ) )
  , pRegions( regions_.data() )
  , filter( filter_ )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
BlitImageInfo2 & operator=( BlitImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
BlitImageInfo2 & operator=( VkBlitImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BlitImageInfo2 const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT
{
srcImage = srcImage_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT
{
srcImageLayout = srcImageLayout_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT
{
dstImage = dstImage_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT
{
dstImageLayout = dstImageLayout_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
{
regionCount = regionCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setPRegions( const VULKAN_HPP_NAMESPACE::ImageBlit2 * pRegions_ ) VULKAN_HPP_NOEXCEPT
{
pRegions = pRegions_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
BlitImageInfo2 & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageBlit2> const & regions_ ) VULKAN_HPP_NOEXCEPT
{
regionCount = static_cast<uint32_t>( regions_.size() );
pRegions = regions_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setFilter( VULKAN_HPP_NAMESPACE::Filter filter_ ) VULKAN_HPP_NOEXCEPT
{
filter = filter_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkBlitImageInfo2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBlitImageInfo2*>( this );
}
operator VkBlitImageInfo2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBlitImageInfo2*>( this );
}
operator VkBlitImageInfo2 const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkBlitImageInfo2*>( this );
}
operator VkBlitImageInfo2 *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkBlitImageInfo2*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Image const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, VULKAN_HPP_NAMESPACE::Image const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::ImageBlit2 * const &, VULKAN_HPP_NAMESPACE::Filter const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( BlitImageInfo2 const & ) const = default;
#else
bool operator==( BlitImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( srcImage == rhs.srcImage )
&& ( srcImageLayout == rhs.srcImageLayout )
&& ( dstImage == rhs.dstImage )
&& ( dstImageLayout == rhs.dstImageLayout )
&& ( regionCount == rhs.regionCount )
&& ( pRegions == rhs.pRegions )
&& ( filter == rhs.filter );
#endif
}
bool operator!=( BlitImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBlitImageInfo2;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Image srcImage = {};
VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
VULKAN_HPP_NAMESPACE::Image dstImage = {};
VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
uint32_t regionCount = {};
const VULKAN_HPP_NAMESPACE::ImageBlit2 * pRegions = {};
VULKAN_HPP_NAMESPACE::Filter filter = VULKAN_HPP_NAMESPACE::Filter::eNearest;
};
template <>
struct CppType<StructureType, StructureType::eBlitImageInfo2>
{
using Type = BlitImageInfo2;
};
using BlitImageInfo2KHR = BlitImageInfo2;
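// Putting the pieces together (comment only): `cmd`, `srcImage`, and `dstImage`
// are hypothetical, and `region` is the ImageBlit2 sketched above. The
// enhanced-mode constructor derives regionCount/pRegions from the proxy argument.
//   VULKAN_HPP_NAMESPACE::BlitImageInfo2 blitInfo( srcImage, VULKAN_HPP_NAMESPACE::ImageLayout::eTransferSrcOptimal,
//                                                  dstImage, VULKAN_HPP_NAMESPACE::ImageLayout::eTransferDstOptimal,
//                                                  region, VULKAN_HPP_NAMESPACE::Filter::eLinear );
//   cmd.blitImage2( blitInfo );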
// wrapper struct for struct VkBufferCaptureDescriptorDataInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferCaptureDescriptorDataInfoEXT.html
struct BufferCaptureDescriptorDataInfoEXT
{
using NativeType = VkBufferCaptureDescriptorDataInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCaptureDescriptorDataInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR BufferCaptureDescriptorDataInfoEXT(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, buffer{ buffer_ }
{}
VULKAN_HPP_CONSTEXPR BufferCaptureDescriptorDataInfoEXT( BufferCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BufferCaptureDescriptorDataInfoEXT( VkBufferCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: BufferCaptureDescriptorDataInfoEXT( *reinterpret_cast<BufferCaptureDescriptorDataInfoEXT const *>( &rhs ) )
{}
BufferCaptureDescriptorDataInfoEXT & operator=( BufferCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
BufferCaptureDescriptorDataInfoEXT & operator=( VkBufferCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 BufferCaptureDescriptorDataInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferCaptureDescriptorDataInfoEXT & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
{
buffer = buffer_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkBufferCaptureDescriptorDataInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBufferCaptureDescriptorDataInfoEXT*>( this );
}
operator VkBufferCaptureDescriptorDataInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBufferCaptureDescriptorDataInfoEXT*>( this );
}
operator VkBufferCaptureDescriptorDataInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkBufferCaptureDescriptorDataInfoEXT*>( this );
}
operator VkBufferCaptureDescriptorDataInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkBufferCaptureDescriptorDataInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Buffer const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, buffer );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( BufferCaptureDescriptorDataInfoEXT const & ) const = default;
#else
bool operator==( BufferCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( buffer == rhs.buffer );
#endif
}
bool operator!=( BufferCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCaptureDescriptorDataInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Buffer buffer = {};
};
template <>
struct CppType<StructureType, StructureType::eBufferCaptureDescriptorDataInfoEXT>
{
using Type = BufferCaptureDescriptorDataInfoEXT;
};
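// Usage sketch (comment only) for VK_EXT_descriptor_buffer capture/replay:
// `device` and `buffer` are hypothetical, and `captureData` must be sized per
// the physical device's descriptor-buffer capture/replay properties.
//   VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT info( buffer );
//   auto result = device.getBufferOpaqueCaptureDescriptorDataEXT( &info, captureData.data() );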
#if defined( VK_USE_PLATFORM_FUCHSIA )
// wrapper struct for struct VkBufferCollectionBufferCreateInfoFUCHSIA, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferCollectionBufferCreateInfoFUCHSIA.html
struct BufferCollectionBufferCreateInfoFUCHSIA
{
using NativeType = VkBufferCollectionBufferCreateInfoFUCHSIA;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCollectionBufferCreateInfoFUCHSIA;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR BufferCollectionBufferCreateInfoFUCHSIA(VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ = {}, uint32_t index_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, collection{ collection_ }, index{ index_ }
{}
VULKAN_HPP_CONSTEXPR BufferCollectionBufferCreateInfoFUCHSIA( BufferCollectionBufferCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BufferCollectionBufferCreateInfoFUCHSIA( VkBufferCollectionBufferCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
: BufferCollectionBufferCreateInfoFUCHSIA( *reinterpret_cast<BufferCollectionBufferCreateInfoFUCHSIA const *>( &rhs ) )
{}
BufferCollectionBufferCreateInfoFUCHSIA & operator=( BufferCollectionBufferCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
BufferCollectionBufferCreateInfoFUCHSIA & operator=( VkBufferCollectionBufferCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCollectionBufferCreateInfoFUCHSIA const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 BufferCollectionBufferCreateInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferCollectionBufferCreateInfoFUCHSIA & setCollection( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ ) VULKAN_HPP_NOEXCEPT
{
collection = collection_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferCollectionBufferCreateInfoFUCHSIA & setIndex( uint32_t index_ ) VULKAN_HPP_NOEXCEPT
{
index = index_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkBufferCollectionBufferCreateInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBufferCollectionBufferCreateInfoFUCHSIA*>( this );
}
operator VkBufferCollectionBufferCreateInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBufferCollectionBufferCreateInfoFUCHSIA*>( this );
}
operator VkBufferCollectionBufferCreateInfoFUCHSIA const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkBufferCollectionBufferCreateInfoFUCHSIA*>( this );
}
operator VkBufferCollectionBufferCreateInfoFUCHSIA *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkBufferCollectionBufferCreateInfoFUCHSIA*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, collection, index );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( BufferCollectionBufferCreateInfoFUCHSIA const & ) const = default;
#else
bool operator==( BufferCollectionBufferCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( collection == rhs.collection )
&& ( index == rhs.index );
#endif
}
bool operator!=( BufferCollectionBufferCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCollectionBufferCreateInfoFUCHSIA;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection = {};
uint32_t index = {};
};
template <>
struct CppType<StructureType, StructureType::eBufferCollectionBufferCreateInfoFUCHSIA>
{
using Type = BufferCollectionBufferCreateInfoFUCHSIA;
};
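// This struct chains onto BufferCreateInfo::pNext to create the buffer from
// slot `index` of a FUCHSIA buffer collection. Sketch, with `collection` and
// `bufferCreateInfo` hypothetical:
//   VULKAN_HPP_NAMESPACE::BufferCollectionBufferCreateInfoFUCHSIA collectionInfo( collection, 0 /* index */ );
//   bufferCreateInfo.setPNext( &collectionInfo );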
#endif /*VK_USE_PLATFORM_FUCHSIA*/
#if defined( VK_USE_PLATFORM_FUCHSIA )
// wrapper struct for struct VkBufferCollectionConstraintsInfoFUCHSIA, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferCollectionConstraintsInfoFUCHSIA.html
struct BufferCollectionConstraintsInfoFUCHSIA
{
using NativeType = VkBufferCollectionConstraintsInfoFUCHSIA;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCollectionConstraintsInfoFUCHSIA;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR BufferCollectionConstraintsInfoFUCHSIA(uint32_t minBufferCount_ = {}, uint32_t maxBufferCount_ = {}, uint32_t minBufferCountForCamping_ = {}, uint32_t minBufferCountForDedicatedSlack_ = {}, uint32_t minBufferCountForSharedSlack_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, minBufferCount{ minBufferCount_ }, maxBufferCount{ maxBufferCount_ }, minBufferCountForCamping{ minBufferCountForCamping_ }, minBufferCountForDedicatedSlack{ minBufferCountForDedicatedSlack_ }, minBufferCountForSharedSlack{ minBufferCountForSharedSlack_ }
{}
VULKAN_HPP_CONSTEXPR BufferCollectionConstraintsInfoFUCHSIA( BufferCollectionConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BufferCollectionConstraintsInfoFUCHSIA( VkBufferCollectionConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
: BufferCollectionConstraintsInfoFUCHSIA( *reinterpret_cast<BufferCollectionConstraintsInfoFUCHSIA const *>( &rhs ) )
{}
BufferCollectionConstraintsInfoFUCHSIA & operator=( BufferCollectionConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
BufferCollectionConstraintsInfoFUCHSIA & operator=( VkBufferCollectionConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA & setMinBufferCount( uint32_t minBufferCount_ ) VULKAN_HPP_NOEXCEPT
{
minBufferCount = minBufferCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA & setMaxBufferCount( uint32_t maxBufferCount_ ) VULKAN_HPP_NOEXCEPT
{
maxBufferCount = maxBufferCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA & setMinBufferCountForCamping( uint32_t minBufferCountForCamping_ ) VULKAN_HPP_NOEXCEPT
{
minBufferCountForCamping = minBufferCountForCamping_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA & setMinBufferCountForDedicatedSlack( uint32_t minBufferCountForDedicatedSlack_ ) VULKAN_HPP_NOEXCEPT
{
minBufferCountForDedicatedSlack = minBufferCountForDedicatedSlack_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA & setMinBufferCountForSharedSlack( uint32_t minBufferCountForSharedSlack_ ) VULKAN_HPP_NOEXCEPT
{
minBufferCountForSharedSlack = minBufferCountForSharedSlack_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkBufferCollectionConstraintsInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBufferCollectionConstraintsInfoFUCHSIA*>( this );
}
operator VkBufferCollectionConstraintsInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBufferCollectionConstraintsInfoFUCHSIA*>( this );
}
operator VkBufferCollectionConstraintsInfoFUCHSIA const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkBufferCollectionConstraintsInfoFUCHSIA*>( this );
}
operator VkBufferCollectionConstraintsInfoFUCHSIA *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkBufferCollectionConstraintsInfoFUCHSIA*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, minBufferCount, maxBufferCount, minBufferCountForCamping, minBufferCountForDedicatedSlack, minBufferCountForSharedSlack );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( BufferCollectionConstraintsInfoFUCHSIA const & ) const = default;
#else
bool operator==( BufferCollectionConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( minBufferCount == rhs.minBufferCount )
&& ( maxBufferCount == rhs.maxBufferCount )
&& ( minBufferCountForCamping == rhs.minBufferCountForCamping )
&& ( minBufferCountForDedicatedSlack == rhs.minBufferCountForDedicatedSlack )
&& ( minBufferCountForSharedSlack == rhs.minBufferCountForSharedSlack );
#endif
}
bool operator!=( BufferCollectionConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCollectionConstraintsInfoFUCHSIA;
const void * pNext = {};
uint32_t minBufferCount = {};
uint32_t maxBufferCount = {};
uint32_t minBufferCountForCamping = {};
uint32_t minBufferCountForDedicatedSlack = {};
uint32_t minBufferCountForSharedSlack = {};
};
template <>
struct CppType<StructureType, StructureType::eBufferCollectionConstraintsInfoFUCHSIA>
{
using Type = BufferCollectionConstraintsInfoFUCHSIA;
};
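// Unlike most FUCHSIA structs here, this one is consumed embedded by value
// inside BufferConstraintsInfoFUCHSIA and ImageConstraintsInfoFUCHSIA rather
// than through a pNext chain.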
#endif /*VK_USE_PLATFORM_FUCHSIA*/
#if defined( VK_USE_PLATFORM_FUCHSIA )
// wrapper struct for struct VkBufferCollectionCreateInfoFUCHSIA, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferCollectionCreateInfoFUCHSIA.html
struct BufferCollectionCreateInfoFUCHSIA
{
using NativeType = VkBufferCollectionCreateInfoFUCHSIA;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCollectionCreateInfoFUCHSIA;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR BufferCollectionCreateInfoFUCHSIA(zx_handle_t collectionToken_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, collectionToken{ collectionToken_ }
{}
VULKAN_HPP_CONSTEXPR BufferCollectionCreateInfoFUCHSIA( BufferCollectionCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BufferCollectionCreateInfoFUCHSIA( VkBufferCollectionCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
: BufferCollectionCreateInfoFUCHSIA( *reinterpret_cast<BufferCollectionCreateInfoFUCHSIA const *>( &rhs ) )
{}
BufferCollectionCreateInfoFUCHSIA & operator=( BufferCollectionCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
BufferCollectionCreateInfoFUCHSIA & operator=( VkBufferCollectionCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 BufferCollectionCreateInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferCollectionCreateInfoFUCHSIA & setCollectionToken( zx_handle_t collectionToken_ ) VULKAN_HPP_NOEXCEPT
{
collectionToken = collectionToken_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkBufferCollectionCreateInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBufferCollectionCreateInfoFUCHSIA*>( this );
}
operator VkBufferCollectionCreateInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBufferCollectionCreateInfoFUCHSIA*>( this );
}
operator VkBufferCollectionCreateInfoFUCHSIA const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkBufferCollectionCreateInfoFUCHSIA*>( this );
}
operator VkBufferCollectionCreateInfoFUCHSIA *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkBufferCollectionCreateInfoFUCHSIA*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, zx_handle_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, collectionToken );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
// collectionToken is an opaque zx_handle_t that is compared bytewise, so operator<=> cannot be defaulted here
std::strong_ordering operator<=>( BufferCollectionCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
if ( auto cmp = memcmp( &collectionToken, &rhs.collectionToken, sizeof( zx_handle_t ) ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
return std::strong_ordering::equivalent;
}
#endif
bool operator==( BufferCollectionCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( memcmp( &collectionToken, &rhs.collectionToken, sizeof( zx_handle_t ) ) == 0 );
#endif
}
bool operator!=( BufferCollectionCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCollectionCreateInfoFUCHSIA;
const void * pNext = {};
zx_handle_t collectionToken = {};
};
template <>
struct CppType<StructureType, StructureType::eBufferCollectionCreateInfoFUCHSIA>
{
using Type = BufferCollectionCreateInfoFUCHSIA;
};
#endif /*VK_USE_PLATFORM_FUCHSIA*/
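// A minimal usage sketch for the struct above (illustrative only; assumes the default `vk` namespace
// alias, an exceptions-enabled enhanced-mode build, a valid vk::Device `device`, and a sysmem token
// `token` obtained elsewhere from Fuchsia's sysmem allocator):
//   vk::BufferCollectionCreateInfoFUCHSIA collectionCreateInfo{ token };
//   vk::BufferCollectionFUCHSIA collection = device.createBufferCollectionFUCHSIA( collectionCreateInfo );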
#if defined( VK_USE_PLATFORM_FUCHSIA )
// wrapper struct for struct VkBufferCollectionImageCreateInfoFUCHSIA, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferCollectionImageCreateInfoFUCHSIA.html
struct BufferCollectionImageCreateInfoFUCHSIA
{
using NativeType = VkBufferCollectionImageCreateInfoFUCHSIA;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCollectionImageCreateInfoFUCHSIA;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR BufferCollectionImageCreateInfoFUCHSIA(VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ = {}, uint32_t index_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, collection{ collection_ }, index{ index_ }
{}
VULKAN_HPP_CONSTEXPR BufferCollectionImageCreateInfoFUCHSIA( BufferCollectionImageCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BufferCollectionImageCreateInfoFUCHSIA( VkBufferCollectionImageCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
: BufferCollectionImageCreateInfoFUCHSIA( *reinterpret_cast<BufferCollectionImageCreateInfoFUCHSIA const *>( &rhs ) )
{}
BufferCollectionImageCreateInfoFUCHSIA & operator=( BufferCollectionImageCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
BufferCollectionImageCreateInfoFUCHSIA & operator=( VkBufferCollectionImageCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCollectionImageCreateInfoFUCHSIA const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 BufferCollectionImageCreateInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferCollectionImageCreateInfoFUCHSIA & setCollection( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ ) VULKAN_HPP_NOEXCEPT
{
collection = collection_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferCollectionImageCreateInfoFUCHSIA & setIndex( uint32_t index_ ) VULKAN_HPP_NOEXCEPT
{
index = index_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkBufferCollectionImageCreateInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBufferCollectionImageCreateInfoFUCHSIA*>( this );
}
operator VkBufferCollectionImageCreateInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBufferCollectionImageCreateInfoFUCHSIA*>( this );
}
operator VkBufferCollectionImageCreateInfoFUCHSIA const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkBufferCollectionImageCreateInfoFUCHSIA*>( this );
}
operator VkBufferCollectionImageCreateInfoFUCHSIA *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkBufferCollectionImageCreateInfoFUCHSIA*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, collection, index );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( BufferCollectionImageCreateInfoFUCHSIA const & ) const = default;
#else
bool operator==( BufferCollectionImageCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( collection == rhs.collection )
&& ( index == rhs.index );
#endif
}
bool operator!=( BufferCollectionImageCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCollectionImageCreateInfoFUCHSIA;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection = {};
uint32_t index = {};
};
template <>
struct CppType<StructureType, StructureType::eBufferCollectionImageCreateInfoFUCHSIA>
{
using Type = BufferCollectionImageCreateInfoFUCHSIA;
};
#endif /*VK_USE_PLATFORM_FUCHSIA*/
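// Illustrative sketch: the struct above is chained into an ImageCreateInfo's pNext so the image is
// bound to buffer `index` of a collection (`collection` and `imageCreateInfo` are assumed names):
//   vk::BufferCollectionImageCreateInfoFUCHSIA collectionImageInfo{ collection, 0 /* index */ };
//   imageCreateInfo.setPNext( &collectionImageInfo );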
#if defined( VK_USE_PLATFORM_FUCHSIA )
// wrapper struct for struct VkSysmemColorSpaceFUCHSIA, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSysmemColorSpaceFUCHSIA.html
struct SysmemColorSpaceFUCHSIA
{
using NativeType = VkSysmemColorSpaceFUCHSIA;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSysmemColorSpaceFUCHSIA;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SysmemColorSpaceFUCHSIA(uint32_t colorSpace_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, colorSpace{ colorSpace_ }
{}
VULKAN_HPP_CONSTEXPR SysmemColorSpaceFUCHSIA( SysmemColorSpaceFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SysmemColorSpaceFUCHSIA( VkSysmemColorSpaceFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
: SysmemColorSpaceFUCHSIA( *reinterpret_cast<SysmemColorSpaceFUCHSIA const *>( &rhs ) )
{}
SysmemColorSpaceFUCHSIA & operator=( SysmemColorSpaceFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SysmemColorSpaceFUCHSIA & operator=( VkSysmemColorSpaceFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 SysmemColorSpaceFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SysmemColorSpaceFUCHSIA & setColorSpace( uint32_t colorSpace_ ) VULKAN_HPP_NOEXCEPT
{
colorSpace = colorSpace_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkSysmemColorSpaceFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSysmemColorSpaceFUCHSIA*>( this );
}
operator VkSysmemColorSpaceFUCHSIA &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSysmemColorSpaceFUCHSIA*>( this );
}
operator VkSysmemColorSpaceFUCHSIA const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSysmemColorSpaceFUCHSIA*>( this );
}
operator VkSysmemColorSpaceFUCHSIA *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSysmemColorSpaceFUCHSIA*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, colorSpace );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SysmemColorSpaceFUCHSIA const & ) const = default;
#else
bool operator==( SysmemColorSpaceFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( colorSpace == rhs.colorSpace );
#endif
}
bool operator!=( SysmemColorSpaceFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSysmemColorSpaceFUCHSIA;
const void * pNext = {};
uint32_t colorSpace = {};
};
template <>
struct CppType<StructureType, StructureType::eSysmemColorSpaceFUCHSIA>
{
using Type = SysmemColorSpaceFUCHSIA;
};
#endif /*VK_USE_PLATFORM_FUCHSIA*/
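// Illustrative sketch: the struct above wraps a raw sysmem color-space identifier; arrays of these are
// typically referenced when negotiating image format constraints for a collection
// (`rawSysmemColorSpaceId` is an assumed value from Fuchsia's sysmem):
//   vk::SysmemColorSpaceFUCHSIA colorSpace = vk::SysmemColorSpaceFUCHSIA{}.setColorSpace( rawSysmemColorSpaceId );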
#if defined( VK_USE_PLATFORM_FUCHSIA )
// wrapper struct for struct VkBufferCollectionPropertiesFUCHSIA, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferCollectionPropertiesFUCHSIA.html
struct BufferCollectionPropertiesFUCHSIA
{
using NativeType = VkBufferCollectionPropertiesFUCHSIA;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCollectionPropertiesFUCHSIA;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR BufferCollectionPropertiesFUCHSIA(uint32_t memoryTypeBits_ = {}, uint32_t bufferCount_ = {}, uint32_t createInfoIndex_ = {}, uint64_t sysmemPixelFormat_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures_ = {}, VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA sysmemColorSpaceIndex_ = {}, VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents_ = {}, VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity, VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull, VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, memoryTypeBits{ memoryTypeBits_ }, bufferCount{ bufferCount_ }, createInfoIndex{ createInfoIndex_ }, sysmemPixelFormat{ sysmemPixelFormat_ }, formatFeatures{ formatFeatures_ }, sysmemColorSpaceIndex{ sysmemColorSpaceIndex_ }, samplerYcbcrConversionComponents{ samplerYcbcrConversionComponents_ }, suggestedYcbcrModel{ suggestedYcbcrModel_ }, suggestedYcbcrRange{ suggestedYcbcrRange_ }, suggestedXChromaOffset{ suggestedXChromaOffset_ }, suggestedYChromaOffset{ suggestedYChromaOffset_ }
{}
VULKAN_HPP_CONSTEXPR BufferCollectionPropertiesFUCHSIA( BufferCollectionPropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BufferCollectionPropertiesFUCHSIA( VkBufferCollectionPropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
: BufferCollectionPropertiesFUCHSIA( *reinterpret_cast<BufferCollectionPropertiesFUCHSIA const *>( &rhs ) )
{}
BufferCollectionPropertiesFUCHSIA & operator=( BufferCollectionPropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
BufferCollectionPropertiesFUCHSIA & operator=( VkBufferCollectionPropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA const *>( &rhs );
return *this;
}
operator VkBufferCollectionPropertiesFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBufferCollectionPropertiesFUCHSIA*>( this );
}
operator VkBufferCollectionPropertiesFUCHSIA &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBufferCollectionPropertiesFUCHSIA*>( this );
}
operator VkBufferCollectionPropertiesFUCHSIA const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkBufferCollectionPropertiesFUCHSIA*>( this );
}
operator VkBufferCollectionPropertiesFUCHSIA *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkBufferCollectionPropertiesFUCHSIA*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, uint32_t const &, uint64_t const &, VULKAN_HPP_NAMESPACE::FormatFeatureFlags const &, VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA const &, VULKAN_HPP_NAMESPACE::ComponentMapping const &, VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion const &, VULKAN_HPP_NAMESPACE::SamplerYcbcrRange const &, VULKAN_HPP_NAMESPACE::ChromaLocation const &, VULKAN_HPP_NAMESPACE::ChromaLocation const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, memoryTypeBits, bufferCount, createInfoIndex, sysmemPixelFormat, formatFeatures, sysmemColorSpaceIndex, samplerYcbcrConversionComponents, suggestedYcbcrModel, suggestedYcbcrRange, suggestedXChromaOffset, suggestedYChromaOffset );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( BufferCollectionPropertiesFUCHSIA const & ) const = default;
#else
bool operator==( BufferCollectionPropertiesFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( memoryTypeBits == rhs.memoryTypeBits )
&& ( bufferCount == rhs.bufferCount )
&& ( createInfoIndex == rhs.createInfoIndex )
&& ( sysmemPixelFormat == rhs.sysmemPixelFormat )
&& ( formatFeatures == rhs.formatFeatures )
&& ( sysmemColorSpaceIndex == rhs.sysmemColorSpaceIndex )
&& ( samplerYcbcrConversionComponents == rhs.samplerYcbcrConversionComponents )
&& ( suggestedYcbcrModel == rhs.suggestedYcbcrModel )
&& ( suggestedYcbcrRange == rhs.suggestedYcbcrRange )
&& ( suggestedXChromaOffset == rhs.suggestedXChromaOffset )
&& ( suggestedYChromaOffset == rhs.suggestedYChromaOffset );
#endif
}
bool operator!=( BufferCollectionPropertiesFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCollectionPropertiesFUCHSIA;
void * pNext = {};
uint32_t memoryTypeBits = {};
uint32_t bufferCount = {};
uint32_t createInfoIndex = {};
uint64_t sysmemPixelFormat = {};
VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures = {};
VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA sysmemColorSpaceIndex = {};
VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents = {};
VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity;
VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull;
VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
};
template <>
struct CppType<StructureType, StructureType::eBufferCollectionPropertiesFUCHSIA>
{
using Type = BufferCollectionPropertiesFUCHSIA;
};
#endif /*VK_USE_PLATFORM_FUCHSIA*/
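// A minimal query sketch for the returned-only properties struct above (illustrative; assumes the
// default `vk` alias, an exceptions-enabled build, and valid `device` and `collection` handles):
//   vk::BufferCollectionPropertiesFUCHSIA properties = device.getBufferCollectionPropertiesFUCHSIA( collection );
//   uint32_t memoryTypeBits = properties.memoryTypeBits;  // feeds memory-type selection for allocation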
// wrapper struct for struct VkBufferCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferCreateInfo.html
struct BufferCreateInfo
{
using NativeType = VkBufferCreateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCreateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR BufferCreateInfo(VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ = {}, VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, uint32_t queueFamilyIndexCount_ = {}, const uint32_t * pQueueFamilyIndices_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, size{ size_ }, usage{ usage_ }, sharingMode{ sharingMode_ }, queueFamilyIndexCount{ queueFamilyIndexCount_ }, pQueueFamilyIndices{ pQueueFamilyIndices_ }
{}
VULKAN_HPP_CONSTEXPR BufferCreateInfo( BufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BufferCreateInfo( VkBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: BufferCreateInfo( *reinterpret_cast<BufferCreateInfo const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
BufferCreateInfo( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_, VULKAN_HPP_NAMESPACE::DeviceSize size_, VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_, VULKAN_HPP_NAMESPACE::SharingMode sharingMode_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_, const void * pNext_ = nullptr )
: pNext( pNext_ ), flags( flags_ ), size( size_ ), usage( usage_ ), sharingMode( sharingMode_ ), queueFamilyIndexCount( static_cast<uint32_t>( queueFamilyIndices_.size() ) ), pQueueFamilyIndices( queueFamilyIndices_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
BufferCreateInfo & operator=( BufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
BufferCreateInfo & operator=( VkBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
{
size = size_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setUsage( VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
{
usage = usage_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT
{
sharingMode = sharingMode_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT
{
queueFamilyIndexCount = queueFamilyIndexCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
{
pQueueFamilyIndices = pQueueFamilyIndices_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
BufferCreateInfo & setQueueFamilyIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
{
queueFamilyIndexCount = static_cast<uint32_t>( queueFamilyIndices_.size() );
pQueueFamilyIndices = queueFamilyIndices_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkBufferCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBufferCreateInfo*>( this );
}
operator VkBufferCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBufferCreateInfo*>( this );
}
operator VkBufferCreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkBufferCreateInfo*>( this );
}
operator VkBufferCreateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkBufferCreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::BufferCreateFlags const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::BufferUsageFlags const &, VULKAN_HPP_NAMESPACE::SharingMode const &, uint32_t const &, const uint32_t * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, size, usage, sharingMode, queueFamilyIndexCount, pQueueFamilyIndices );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( BufferCreateInfo const & ) const = default;
#else
bool operator==( BufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( size == rhs.size )
&& ( usage == rhs.usage )
&& ( sharingMode == rhs.sharingMode )
&& ( queueFamilyIndexCount == rhs.queueFamilyIndexCount )
&& ( pQueueFamilyIndices == rhs.pQueueFamilyIndices );
#endif
}
bool operator!=( BufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCreateInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::BufferCreateFlags flags = {};
VULKAN_HPP_NAMESPACE::DeviceSize size = {};
VULKAN_HPP_NAMESPACE::BufferUsageFlags usage = {};
VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive;
uint32_t queueFamilyIndexCount = {};
const uint32_t * pQueueFamilyIndices = {};
};
template <>
struct CppType<StructureType, StructureType::eBufferCreateInfo>
{
using Type = BufferCreateInfo;
};
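// A minimal usage sketch for BufferCreateInfo (illustrative; assumes the default `vk` alias, an
// exceptions-enabled build, and a valid vk::Device `device`; the size and usage are placeholders):
//   vk::BufferCreateInfo bufferCreateInfo = vk::BufferCreateInfo{}
//                                             .setSize( 65536 )
//                                             .setUsage( vk::BufferUsageFlagBits::eStorageBuffer )
//                                             .setSharingMode( vk::SharingMode::eExclusive );
//   vk::Buffer buffer = device.createBuffer( bufferCreateInfo );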
#if defined( VK_USE_PLATFORM_FUCHSIA )
// wrapper struct for struct VkBufferConstraintsInfoFUCHSIA, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferConstraintsInfoFUCHSIA.html
struct BufferConstraintsInfoFUCHSIA
{
using NativeType = VkBufferConstraintsInfoFUCHSIA;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferConstraintsInfoFUCHSIA;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR BufferConstraintsInfoFUCHSIA(VULKAN_HPP_NAMESPACE::BufferCreateInfo createInfo_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures_ = {}, VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, createInfo{ createInfo_ }, requiredFormatFeatures{ requiredFormatFeatures_ }, bufferCollectionConstraints{ bufferCollectionConstraints_ }
{}
VULKAN_HPP_CONSTEXPR BufferConstraintsInfoFUCHSIA( BufferConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BufferConstraintsInfoFUCHSIA( VkBufferConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
: BufferConstraintsInfoFUCHSIA( *reinterpret_cast<BufferConstraintsInfoFUCHSIA const *>( &rhs ) )
{}
BufferConstraintsInfoFUCHSIA & operator=( BufferConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
BufferConstraintsInfoFUCHSIA & operator=( VkBufferConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 BufferConstraintsInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferConstraintsInfoFUCHSIA & setCreateInfo( VULKAN_HPP_NAMESPACE::BufferCreateInfo const & createInfo_ ) VULKAN_HPP_NOEXCEPT
{
createInfo = createInfo_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferConstraintsInfoFUCHSIA & setRequiredFormatFeatures( VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures_ ) VULKAN_HPP_NOEXCEPT
{
requiredFormatFeatures = requiredFormatFeatures_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferConstraintsInfoFUCHSIA & setBufferCollectionConstraints( VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA const & bufferCollectionConstraints_ ) VULKAN_HPP_NOEXCEPT
{
bufferCollectionConstraints = bufferCollectionConstraints_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkBufferConstraintsInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBufferConstraintsInfoFUCHSIA*>( this );
}
operator VkBufferConstraintsInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBufferConstraintsInfoFUCHSIA*>( this );
}
operator VkBufferConstraintsInfoFUCHSIA const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkBufferConstraintsInfoFUCHSIA*>( this );
}
operator VkBufferConstraintsInfoFUCHSIA *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkBufferConstraintsInfoFUCHSIA*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::BufferCreateInfo const &, VULKAN_HPP_NAMESPACE::FormatFeatureFlags const &, VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, createInfo, requiredFormatFeatures, bufferCollectionConstraints );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( BufferConstraintsInfoFUCHSIA const & ) const = default;
#else
bool operator==( BufferConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( createInfo == rhs.createInfo )
&& ( requiredFormatFeatures == rhs.requiredFormatFeatures )
&& ( bufferCollectionConstraints == rhs.bufferCollectionConstraints );
#endif
}
bool operator!=( BufferConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferConstraintsInfoFUCHSIA;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::BufferCreateInfo createInfo = {};
VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures = {};
VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints = {};
};
template <>
struct CppType<StructureType, StructureType::eBufferConstraintsInfoFUCHSIA>
{
using Type = BufferConstraintsInfoFUCHSIA;
};
#endif /*VK_USE_PLATFORM_FUCHSIA*/
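// A minimal usage sketch for the struct above (illustrative; `bufferCreateInfo`, `constraints`,
// `device`, and `collection` are assumed from the sketches above):
//   vk::BufferConstraintsInfoFUCHSIA bufferConstraints{ bufferCreateInfo, vk::FormatFeatureFlags{}, constraints };
//   device.setBufferCollectionBufferConstraintsFUCHSIA( collection, bufferConstraints );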
// wrapper struct for struct VkBufferCopy, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferCopy.html
struct BufferCopy
{
using NativeType = VkBufferCopy;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR BufferCopy(VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}) VULKAN_HPP_NOEXCEPT
: srcOffset{ srcOffset_ }, dstOffset{ dstOffset_ }, size{ size_ }
{}
VULKAN_HPP_CONSTEXPR BufferCopy( BufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BufferCopy( VkBufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT
: BufferCopy( *reinterpret_cast<BufferCopy const *>( &rhs ) )
{}
BufferCopy & operator=( BufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
BufferCopy & operator=( VkBufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCopy const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 BufferCopy & setSrcOffset( VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ ) VULKAN_HPP_NOEXCEPT
{
srcOffset = srcOffset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferCopy & setDstOffset( VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ ) VULKAN_HPP_NOEXCEPT
{
dstOffset = dstOffset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferCopy & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
{
size = size_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkBufferCopy const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBufferCopy*>( this );
}
operator VkBufferCopy &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBufferCopy*>( this );
}
operator VkBufferCopy const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkBufferCopy*>( this );
}
operator VkBufferCopy *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkBufferCopy*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( srcOffset, dstOffset, size );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( BufferCopy const & ) const = default;
#else
bool operator==( BufferCopy const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( srcOffset == rhs.srcOffset )
&& ( dstOffset == rhs.dstOffset )
&& ( size == rhs.size );
#endif
}
bool operator!=( BufferCopy const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::DeviceSize srcOffset = {};
VULKAN_HPP_NAMESPACE::DeviceSize dstOffset = {};
VULKAN_HPP_NAMESPACE::DeviceSize size = {};
};
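// A minimal usage sketch for BufferCopy (illustrative; assumes a vk::CommandBuffer `cmd` in the
// recording state and valid `srcBuffer` / `dstBuffer` handles):
//   vk::BufferCopy region{ 0 /* srcOffset */, 0 /* dstOffset */, 256 /* size */ };
//   cmd.copyBuffer( srcBuffer, dstBuffer, region );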
// wrapper struct for struct VkBufferCopy2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferCopy2.html
struct BufferCopy2
{
using NativeType = VkBufferCopy2;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCopy2;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR BufferCopy2(VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, srcOffset{ srcOffset_ }, dstOffset{ dstOffset_ }, size{ size_ }
{}
VULKAN_HPP_CONSTEXPR BufferCopy2( BufferCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BufferCopy2( VkBufferCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT
: BufferCopy2( *reinterpret_cast<BufferCopy2 const *>( &rhs ) )
{}
BufferCopy2 & operator=( BufferCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
BufferCopy2 & operator=( VkBufferCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCopy2 const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 BufferCopy2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferCopy2 & setSrcOffset( VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ ) VULKAN_HPP_NOEXCEPT
{
srcOffset = srcOffset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferCopy2 & setDstOffset( VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ ) VULKAN_HPP_NOEXCEPT
{
dstOffset = dstOffset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferCopy2 & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
{
size = size_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkBufferCopy2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBufferCopy2*>( this );
}
operator VkBufferCopy2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBufferCopy2*>( this );
}
operator VkBufferCopy2 const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkBufferCopy2*>( this );
}
operator VkBufferCopy2 *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkBufferCopy2*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, srcOffset, dstOffset, size );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( BufferCopy2 const & ) const = default;
#else
bool operator==( BufferCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( srcOffset == rhs.srcOffset )
&& ( dstOffset == rhs.dstOffset )
&& ( size == rhs.size );
#endif
}
bool operator!=( BufferCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCopy2;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DeviceSize srcOffset = {};
VULKAN_HPP_NAMESPACE::DeviceSize dstOffset = {};
VULKAN_HPP_NAMESPACE::DeviceSize size = {};
};
template <>
struct CppType<StructureType, StructureType::eBufferCopy2>
{
using Type = BufferCopy2;
};
using BufferCopy2KHR = BufferCopy2;
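// A minimal usage sketch for BufferCopy2 (illustrative; same assumed names as above, enhanced mode
// enabled; vkCmdCopyBuffer2 requires Vulkan 1.3 or VK_KHR_copy_commands2):
//   vk::BufferCopy2 region2{ 0, 0, 256 };
//   vk::CopyBufferInfo2 copyInfo{ srcBuffer, dstBuffer, region2 };
//   cmd.copyBuffer2( copyInfo );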
// wrapper struct for struct VkBufferDeviceAddressCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferDeviceAddressCreateInfoEXT.html
struct BufferDeviceAddressCreateInfoEXT
{
using NativeType = VkBufferDeviceAddressCreateInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferDeviceAddressCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR BufferDeviceAddressCreateInfoEXT(VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, deviceAddress{ deviceAddress_ }
{}
VULKAN_HPP_CONSTEXPR BufferDeviceAddressCreateInfoEXT( BufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BufferDeviceAddressCreateInfoEXT( VkBufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: BufferDeviceAddressCreateInfoEXT( *reinterpret_cast<BufferDeviceAddressCreateInfoEXT const *>( &rhs ) )
{}
BufferDeviceAddressCreateInfoEXT & operator=( BufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
BufferDeviceAddressCreateInfoEXT & operator=( VkBufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferDeviceAddressCreateInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 BufferDeviceAddressCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferDeviceAddressCreateInfoEXT & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
{
deviceAddress = deviceAddress_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkBufferDeviceAddressCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBufferDeviceAddressCreateInfoEXT*>( this );
}
operator VkBufferDeviceAddressCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBufferDeviceAddressCreateInfoEXT*>( this );
}
operator VkBufferDeviceAddressCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkBufferDeviceAddressCreateInfoEXT*>( this );
}
operator VkBufferDeviceAddressCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkBufferDeviceAddressCreateInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, deviceAddress );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( BufferDeviceAddressCreateInfoEXT const & ) const = default;
#else
bool operator==( BufferDeviceAddressCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( deviceAddress == rhs.deviceAddress );
#endif
}
bool operator!=( BufferDeviceAddressCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferDeviceAddressCreateInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {};
};
template <>
struct CppType<StructureType, StructureType::eBufferDeviceAddressCreateInfoEXT>
{
using Type = BufferDeviceAddressCreateInfoEXT;
};
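// An illustrative sketch for the struct above: chained into a BufferCreateInfo's pNext to request that
// a buffer be recreated at a previously captured address for VK_EXT_buffer_device_address
// capture/replay (`capturedAddress` and `bufferCreateInfo` are assumed names):
//   vk::BufferDeviceAddressCreateInfoEXT addressInfo{ capturedAddress };
//   bufferCreateInfo.setPNext( &addressInfo );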
// wrapper struct for struct VkBufferDeviceAddressInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferDeviceAddressInfo.html
struct BufferDeviceAddressInfo
{
using NativeType = VkBufferDeviceAddressInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferDeviceAddressInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR BufferDeviceAddressInfo(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, buffer{ buffer_ }
{}
VULKAN_HPP_CONSTEXPR BufferDeviceAddressInfo( BufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BufferDeviceAddressInfo( VkBufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: BufferDeviceAddressInfo( *reinterpret_cast<BufferDeviceAddressInfo const *>( &rhs ) )
{}
BufferDeviceAddressInfo & operator=( BufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
BufferDeviceAddressInfo & operator=( VkBufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 BufferDeviceAddressInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferDeviceAddressInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
{
buffer = buffer_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkBufferDeviceAddressInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBufferDeviceAddressInfo*>( this );
}
operator VkBufferDeviceAddressInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBufferDeviceAddressInfo*>( this );
}
operator VkBufferDeviceAddressInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkBufferDeviceAddressInfo*>( this );
}
operator VkBufferDeviceAddressInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkBufferDeviceAddressInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Buffer const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, buffer );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( BufferDeviceAddressInfo const & ) const = default;
#else
bool operator==( BufferDeviceAddressInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( buffer == rhs.buffer );
#endif
}
bool operator!=( BufferDeviceAddressInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferDeviceAddressInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Buffer buffer = {};
};
template <>
struct CppType<StructureType, StructureType::eBufferDeviceAddressInfo>
{
using Type = BufferDeviceAddressInfo;
};
using BufferDeviceAddressInfoEXT = BufferDeviceAddressInfo;
using BufferDeviceAddressInfoKHR = BufferDeviceAddressInfo;
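// A minimal usage sketch for BufferDeviceAddressInfo (illustrative; assumes `buffer` was created with
// vk::BufferUsageFlagBits::eShaderDeviceAddress and the bufferDeviceAddress feature is enabled):
//   vk::DeviceAddress address = device.getBufferAddress( vk::BufferDeviceAddressInfo{ buffer } );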
// wrapper struct for struct VkBufferImageCopy, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferImageCopy.html
struct BufferImageCopy
{
using NativeType = VkBufferImageCopy;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR BufferImageCopy(VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ = {}, uint32_t bufferRowLength_ = {}, uint32_t bufferImageHeight_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {}) VULKAN_HPP_NOEXCEPT
: bufferOffset{ bufferOffset_ }, bufferRowLength{ bufferRowLength_ }, bufferImageHeight{ bufferImageHeight_ }, imageSubresource{ imageSubresource_ }, imageOffset{ imageOffset_ }, imageExtent{ imageExtent_ }
{}
VULKAN_HPP_CONSTEXPR BufferImageCopy( BufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BufferImageCopy( VkBufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
: BufferImageCopy( *reinterpret_cast<BufferImageCopy const *>( &rhs ) )
{}
BufferImageCopy & operator=( BufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
BufferImageCopy & operator=( VkBufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferImageCopy const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & setBufferOffset( VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ ) VULKAN_HPP_NOEXCEPT
{
bufferOffset = bufferOffset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & setBufferRowLength( uint32_t bufferRowLength_ ) VULKAN_HPP_NOEXCEPT
{
bufferRowLength = bufferRowLength_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & setBufferImageHeight( uint32_t bufferImageHeight_ ) VULKAN_HPP_NOEXCEPT
{
bufferImageHeight = bufferImageHeight_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT
{
imageSubresource = imageSubresource_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & setImageOffset( VULKAN_HPP_NAMESPACE::Offset3D const & imageOffset_ ) VULKAN_HPP_NOEXCEPT
{
imageOffset = imageOffset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & setImageExtent( VULKAN_HPP_NAMESPACE::Extent3D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT
{
imageExtent = imageExtent_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkBufferImageCopy const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBufferImageCopy*>( this );
}
operator VkBufferImageCopy &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBufferImageCopy*>( this );
}
operator VkBufferImageCopy const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkBufferImageCopy*>( this );
}
operator VkBufferImageCopy *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkBufferImageCopy*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::DeviceSize const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &, VULKAN_HPP_NAMESPACE::Offset3D const &, VULKAN_HPP_NAMESPACE::Extent3D const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( bufferOffset, bufferRowLength, bufferImageHeight, imageSubresource, imageOffset, imageExtent );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( BufferImageCopy const & ) const = default;
#else
bool operator==( BufferImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( bufferOffset == rhs.bufferOffset )
&& ( bufferRowLength == rhs.bufferRowLength )
&& ( bufferImageHeight == rhs.bufferImageHeight )
&& ( imageSubresource == rhs.imageSubresource )
&& ( imageOffset == rhs.imageOffset )
&& ( imageExtent == rhs.imageExtent );
#endif
}
bool operator!=( BufferImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset = {};
uint32_t bufferRowLength = {};
uint32_t bufferImageHeight = {};
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource = {};
VULKAN_HPP_NAMESPACE::Offset3D imageOffset = {};
VULKAN_HPP_NAMESPACE::Extent3D imageExtent = {};
};
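// A minimal usage sketch for BufferImageCopy (illustrative; `cmd`, `stagingBuffer`, `image`, `width`,
// and `height` are assumed names):
//   vk::BufferImageCopy region = vk::BufferImageCopy{}
//                                  .setImageSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
//                                  .setImageExtent( { width, height, 1 } );
//   cmd.copyBufferToImage( stagingBuffer, image, vk::ImageLayout::eTransferDstOptimal, region );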
// wrapper struct for struct VkBufferImageCopy2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferImageCopy2.html
struct BufferImageCopy2
{
using NativeType = VkBufferImageCopy2;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferImageCopy2;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR BufferImageCopy2(VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ = {}, uint32_t bufferRowLength_ = {}, uint32_t bufferImageHeight_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, bufferOffset{ bufferOffset_ }, bufferRowLength{ bufferRowLength_ }, bufferImageHeight{ bufferImageHeight_ }, imageSubresource{ imageSubresource_ }, imageOffset{ imageOffset_ }, imageExtent{ imageExtent_ }
{}
VULKAN_HPP_CONSTEXPR BufferImageCopy2( BufferImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BufferImageCopy2( VkBufferImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT
: BufferImageCopy2( *reinterpret_cast<BufferImageCopy2 const *>( &rhs ) )
{}
BufferImageCopy2 & operator=( BufferImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
BufferImageCopy2 & operator=( VkBufferImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferImageCopy2 const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setBufferOffset( VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ ) VULKAN_HPP_NOEXCEPT
{
bufferOffset = bufferOffset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setBufferRowLength( uint32_t bufferRowLength_ ) VULKAN_HPP_NOEXCEPT
{
bufferRowLength = bufferRowLength_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setBufferImageHeight( uint32_t bufferImageHeight_ ) VULKAN_HPP_NOEXCEPT
{
bufferImageHeight = bufferImageHeight_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT
{
imageSubresource = imageSubresource_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setImageOffset( VULKAN_HPP_NAMESPACE::Offset3D const & imageOffset_ ) VULKAN_HPP_NOEXCEPT
{
imageOffset = imageOffset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setImageExtent( VULKAN_HPP_NAMESPACE::Extent3D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT
{
imageExtent = imageExtent_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkBufferImageCopy2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBufferImageCopy2*>( this );
}
operator VkBufferImageCopy2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBufferImageCopy2*>( this );
}
operator VkBufferImageCopy2 const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkBufferImageCopy2*>( this );
}
operator VkBufferImageCopy2 *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkBufferImageCopy2*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &, VULKAN_HPP_NAMESPACE::Offset3D const &, VULKAN_HPP_NAMESPACE::Extent3D const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, bufferOffset, bufferRowLength, bufferImageHeight, imageSubresource, imageOffset, imageExtent );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( BufferImageCopy2 const & ) const = default;
#else
bool operator==( BufferImageCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( bufferOffset == rhs.bufferOffset )
&& ( bufferRowLength == rhs.bufferRowLength )
&& ( bufferImageHeight == rhs.bufferImageHeight )
&& ( imageSubresource == rhs.imageSubresource )
&& ( imageOffset == rhs.imageOffset )
&& ( imageExtent == rhs.imageExtent );
#endif
}
bool operator!=( BufferImageCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferImageCopy2;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset = {};
uint32_t bufferRowLength = {};
uint32_t bufferImageHeight = {};
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource = {};
VULKAN_HPP_NAMESPACE::Offset3D imageOffset = {};
VULKAN_HPP_NAMESPACE::Extent3D imageExtent = {};
};
template <>
struct CppType<StructureType, StructureType::eBufferImageCopy2>
{
using Type = BufferImageCopy2;
};
using BufferImageCopy2KHR = BufferImageCopy2;
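// A minimal usage sketch for BufferImageCopy2 (illustrative; same assumed names as above;
// vkCmdCopyBufferToImage2 requires Vulkan 1.3 or VK_KHR_copy_commands2):
//   vk::BufferImageCopy2 region2 = vk::BufferImageCopy2{}
//                                    .setImageSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
//                                    .setImageExtent( { width, height, 1 } );
//   vk::CopyBufferToImageInfo2 copyInfo{ stagingBuffer, image, vk::ImageLayout::eTransferDstOptimal, region2 };
//   cmd.copyBufferToImage2( copyInfo );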
// wrapper struct for struct VkBufferMemoryBarrier, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferMemoryBarrier.html
struct BufferMemoryBarrier
{
using NativeType = VkBufferMemoryBarrier;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferMemoryBarrier;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR BufferMemoryBarrier(VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, uint32_t srcQueueFamilyIndex_ = {}, uint32_t dstQueueFamilyIndex_ = {}, VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, srcAccessMask{ srcAccessMask_ }, dstAccessMask{ dstAccessMask_ }, srcQueueFamilyIndex{ srcQueueFamilyIndex_ }, dstQueueFamilyIndex{ dstQueueFamilyIndex_ }, buffer{ buffer_ }, offset{ offset_ }, size{ size_ }
{}
VULKAN_HPP_CONSTEXPR BufferMemoryBarrier( BufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BufferMemoryBarrier( VkBufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
: BufferMemoryBarrier( *reinterpret_cast<BufferMemoryBarrier const *>( &rhs ) )
{}
BufferMemoryBarrier & operator=( BufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
BufferMemoryBarrier & operator=( VkBufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferMemoryBarrier const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
{
srcAccessMask = srcAccessMask_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
{
dstAccessMask = dstAccessMask_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
{
srcQueueFamilyIndex = srcQueueFamilyIndex_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
{
dstQueueFamilyIndex = dstQueueFamilyIndex_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
{
buffer = buffer_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
{
offset = offset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
{
size = size_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkBufferMemoryBarrier const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBufferMemoryBarrier*>( this );
}
operator VkBufferMemoryBarrier &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBufferMemoryBarrier*>( this );
}
operator VkBufferMemoryBarrier const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkBufferMemoryBarrier*>( this );
}
operator VkBufferMemoryBarrier *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkBufferMemoryBarrier*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::AccessFlags const &, VULKAN_HPP_NAMESPACE::AccessFlags const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Buffer const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, srcAccessMask, dstAccessMask, srcQueueFamilyIndex, dstQueueFamilyIndex, buffer, offset, size );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( BufferMemoryBarrier const & ) const = default;
#else
bool operator==( BufferMemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( srcAccessMask == rhs.srcAccessMask )
&& ( dstAccessMask == rhs.dstAccessMask )
&& ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex )
&& ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex )
&& ( buffer == rhs.buffer )
&& ( offset == rhs.offset )
&& ( size == rhs.size );
#endif
}
bool operator!=( BufferMemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferMemoryBarrier;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {};
VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
uint32_t srcQueueFamilyIndex = {};
uint32_t dstQueueFamilyIndex = {};
VULKAN_HPP_NAMESPACE::Buffer buffer = {};
VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
VULKAN_HPP_NAMESPACE::DeviceSize size = {};
};
template <>
struct CppType<StructureType, StructureType::eBufferMemoryBarrier>
{
using Type = BufferMemoryBarrier;
};
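// Illustrative sketch, not generated code: a BufferMemoryBarrier is usually filled through the
// chained setters above and recorded with CommandBuffer::pipelineBarrier; `commandBuffer` and
// `buffer` are caller-provided handles and `vk` is the conventional alias for VULKAN_HPP_NAMESPACE.
//
//   vk::BufferMemoryBarrier barrier = vk::BufferMemoryBarrier{}
//                                       .setSrcAccessMask( vk::AccessFlagBits::eTransferWrite )
//                                       .setDstAccessMask( vk::AccessFlagBits::eShaderRead )
//                                       .setSrcQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
//                                       .setDstQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
//                                       .setBuffer( buffer )
//                                       .setSize( VK_WHOLE_SIZE );
//   commandBuffer.pipelineBarrier( vk::PipelineStageFlagBits::eTransfer,
//                                  vk::PipelineStageFlagBits::eFragmentShader,
//                                  {}, nullptr, barrier, nullptr );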
// wrapper struct for struct VkBufferMemoryBarrier2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferMemoryBarrier2.html
struct BufferMemoryBarrier2
{
using NativeType = VkBufferMemoryBarrier2;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferMemoryBarrier2;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR BufferMemoryBarrier2(VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ = {}, uint32_t srcQueueFamilyIndex_ = {}, uint32_t dstQueueFamilyIndex_ = {}, VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, srcStageMask{ srcStageMask_ }, srcAccessMask{ srcAccessMask_ }, dstStageMask{ dstStageMask_ }, dstAccessMask{ dstAccessMask_ }, srcQueueFamilyIndex{ srcQueueFamilyIndex_ }, dstQueueFamilyIndex{ dstQueueFamilyIndex_ }, buffer{ buffer_ }, offset{ offset_ }, size{ size_ }
{}
VULKAN_HPP_CONSTEXPR BufferMemoryBarrier2( BufferMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BufferMemoryBarrier2( VkBufferMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT
: BufferMemoryBarrier2( *reinterpret_cast<BufferMemoryBarrier2 const *>( &rhs ) )
{}
BufferMemoryBarrier2 & operator=( BufferMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
BufferMemoryBarrier2 & operator=( VkBufferMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2 const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ ) VULKAN_HPP_NOEXCEPT
{
srcStageMask = srcStageMask_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
{
srcAccessMask = srcAccessMask_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ ) VULKAN_HPP_NOEXCEPT
{
dstStageMask = dstStageMask_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
{
dstAccessMask = dstAccessMask_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
{
srcQueueFamilyIndex = srcQueueFamilyIndex_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
{
dstQueueFamilyIndex = dstQueueFamilyIndex_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
{
buffer = buffer_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
{
offset = offset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
{
size = size_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkBufferMemoryBarrier2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBufferMemoryBarrier2*>( this );
}
operator VkBufferMemoryBarrier2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBufferMemoryBarrier2*>( this );
}
operator VkBufferMemoryBarrier2 const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkBufferMemoryBarrier2*>( this );
}
operator VkBufferMemoryBarrier2 *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkBufferMemoryBarrier2*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 const &, VULKAN_HPP_NAMESPACE::AccessFlags2 const &, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 const &, VULKAN_HPP_NAMESPACE::AccessFlags2 const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Buffer const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, srcStageMask, srcAccessMask, dstStageMask, dstAccessMask, srcQueueFamilyIndex, dstQueueFamilyIndex, buffer, offset, size );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( BufferMemoryBarrier2 const & ) const = default;
#else
bool operator==( BufferMemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( srcStageMask == rhs.srcStageMask )
&& ( srcAccessMask == rhs.srcAccessMask )
&& ( dstStageMask == rhs.dstStageMask )
&& ( dstAccessMask == rhs.dstAccessMask )
&& ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex )
&& ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex )
&& ( buffer == rhs.buffer )
&& ( offset == rhs.offset )
&& ( size == rhs.size );
#endif
}
bool operator!=( BufferMemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferMemoryBarrier2;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask = {};
VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask = {};
VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask = {};
VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask = {};
uint32_t srcQueueFamilyIndex = {};
uint32_t dstQueueFamilyIndex = {};
VULKAN_HPP_NAMESPACE::Buffer buffer = {};
VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
VULKAN_HPP_NAMESPACE::DeviceSize size = {};
};
template <>
struct CppType<StructureType, StructureType::eBufferMemoryBarrier2>
{
using Type = BufferMemoryBarrier2;
};
using BufferMemoryBarrier2KHR = BufferMemoryBarrier2;
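// Illustrative sketch, not generated code: with synchronization2 the stage masks move into the
// barrier itself, and the barrier is passed through a DependencyInfo to
// CommandBuffer::pipelineBarrier2; `commandBuffer` and `buffer` are caller-provided placeholders.
//
//   vk::BufferMemoryBarrier2 barrier2 =
//     vk::BufferMemoryBarrier2{}
//       .setSrcStageMask( vk::PipelineStageFlagBits2::eTransfer )
//       .setSrcAccessMask( vk::AccessFlagBits2::eTransferWrite )
//       .setDstStageMask( vk::PipelineStageFlagBits2::eFragmentShader )
//       .setDstAccessMask( vk::AccessFlagBits2::eShaderRead )
//       .setBuffer( buffer )
//       .setSize( VK_WHOLE_SIZE );
//   commandBuffer.pipelineBarrier2( vk::DependencyInfo{}.setBufferMemoryBarriers( barrier2 ) );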
// wrapper struct for struct VkBufferMemoryRequirementsInfo2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferMemoryRequirementsInfo2.html
struct BufferMemoryRequirementsInfo2
{
using NativeType = VkBufferMemoryRequirementsInfo2;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferMemoryRequirementsInfo2;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR BufferMemoryRequirementsInfo2(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, buffer{ buffer_ }
{}
VULKAN_HPP_CONSTEXPR BufferMemoryRequirementsInfo2( BufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BufferMemoryRequirementsInfo2( VkBufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
: BufferMemoryRequirementsInfo2( *reinterpret_cast<BufferMemoryRequirementsInfo2 const *>( &rhs ) )
{}
BufferMemoryRequirementsInfo2 & operator=( BufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
BufferMemoryRequirementsInfo2 & operator=( VkBufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 BufferMemoryRequirementsInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferMemoryRequirementsInfo2 & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
{
buffer = buffer_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkBufferMemoryRequirementsInfo2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBufferMemoryRequirementsInfo2*>( this );
}
operator VkBufferMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBufferMemoryRequirementsInfo2*>( this );
}
operator VkBufferMemoryRequirementsInfo2 const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkBufferMemoryRequirementsInfo2*>( this );
}
operator VkBufferMemoryRequirementsInfo2 *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkBufferMemoryRequirementsInfo2*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Buffer const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, buffer );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( BufferMemoryRequirementsInfo2 const & ) const = default;
#else
bool operator==( BufferMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( buffer == rhs.buffer );
#endif
}
bool operator!=( BufferMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferMemoryRequirementsInfo2;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Buffer buffer = {};
};
template <>
struct CppType<StructureType, StructureType::eBufferMemoryRequirementsInfo2>
{
using Type = BufferMemoryRequirementsInfo2;
};
using BufferMemoryRequirementsInfo2KHR = BufferMemoryRequirementsInfo2;
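// Illustrative sketch, not generated code: BufferMemoryRequirementsInfo2 is consumed by
// Device::getBufferMemoryRequirements2, whose MemoryRequirements2 result can itself be extended
// through a pNext chain (e.g. with MemoryDedicatedRequirements); `device` and `buffer` are
// caller-provided.
//
//   vk::MemoryRequirements2 requirements =
//     device.getBufferMemoryRequirements2( vk::BufferMemoryRequirementsInfo2{}.setBuffer( buffer ) );
//   vk::DeviceSize allocationSize = requirements.memoryRequirements.size;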
// wrapper struct for struct VkBufferOpaqueCaptureAddressCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferOpaqueCaptureAddressCreateInfo.html
struct BufferOpaqueCaptureAddressCreateInfo
{
using NativeType = VkBufferOpaqueCaptureAddressCreateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferOpaqueCaptureAddressCreateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR BufferOpaqueCaptureAddressCreateInfo(uint64_t opaqueCaptureAddress_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, opaqueCaptureAddress{ opaqueCaptureAddress_ }
{}
VULKAN_HPP_CONSTEXPR BufferOpaqueCaptureAddressCreateInfo( BufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BufferOpaqueCaptureAddressCreateInfo( VkBufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: BufferOpaqueCaptureAddressCreateInfo( *reinterpret_cast<BufferOpaqueCaptureAddressCreateInfo const *>( &rhs ) )
{}
BufferOpaqueCaptureAddressCreateInfo & operator=( BufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
BufferOpaqueCaptureAddressCreateInfo & operator=( VkBufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 BufferOpaqueCaptureAddressCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferOpaqueCaptureAddressCreateInfo & setOpaqueCaptureAddress( uint64_t opaqueCaptureAddress_ ) VULKAN_HPP_NOEXCEPT
{
opaqueCaptureAddress = opaqueCaptureAddress_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkBufferOpaqueCaptureAddressCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBufferOpaqueCaptureAddressCreateInfo*>( this );
}
operator VkBufferOpaqueCaptureAddressCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBufferOpaqueCaptureAddressCreateInfo*>( this );
}
operator VkBufferOpaqueCaptureAddressCreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkBufferOpaqueCaptureAddressCreateInfo*>( this );
}
operator VkBufferOpaqueCaptureAddressCreateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkBufferOpaqueCaptureAddressCreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint64_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, opaqueCaptureAddress );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( BufferOpaqueCaptureAddressCreateInfo const & ) const = default;
#else
bool operator==( BufferOpaqueCaptureAddressCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( opaqueCaptureAddress == rhs.opaqueCaptureAddress );
#endif
}
bool operator!=( BufferOpaqueCaptureAddressCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferOpaqueCaptureAddressCreateInfo;
const void * pNext = {};
uint64_t opaqueCaptureAddress = {};
};
template <>
struct CppType<StructureType, StructureType::eBufferOpaqueCaptureAddressCreateInfo>
{
using Type = BufferOpaqueCaptureAddressCreateInfo;
};
using BufferOpaqueCaptureAddressCreateInfoKHR = BufferOpaqueCaptureAddressCreateInfo;
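// Illustrative sketch, not generated code: for capture/replay tooling a previously captured
// opaque address is chained into BufferCreateInfo via pNext; this assumes the
// bufferDeviceAddressCaptureReplay feature is enabled, and `capturedAddress` / `size` are
// caller-provided placeholders.
//
//   vk::BufferOpaqueCaptureAddressCreateInfo captureInfo =
//     vk::BufferOpaqueCaptureAddressCreateInfo{}.setOpaqueCaptureAddress( capturedAddress );
//   vk::BufferCreateInfo bufferCreateInfo =
//     vk::BufferCreateInfo{}
//       .setFlags( vk::BufferCreateFlagBits::eDeviceAddressCaptureReplay )
//       .setSize( size )
//       .setUsage( vk::BufferUsageFlagBits::eShaderDeviceAddress )
//       .setPNext( &captureInfo );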
// wrapper struct for struct VkBufferUsageFlags2CreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferUsageFlags2CreateInfo.html
struct BufferUsageFlags2CreateInfo
{
using NativeType = VkBufferUsageFlags2CreateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferUsageFlags2CreateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR BufferUsageFlags2CreateInfo(VULKAN_HPP_NAMESPACE::BufferUsageFlags2 usage_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, usage{ usage_ }
{}
VULKAN_HPP_CONSTEXPR BufferUsageFlags2CreateInfo( BufferUsageFlags2CreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BufferUsageFlags2CreateInfo( VkBufferUsageFlags2CreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: BufferUsageFlags2CreateInfo( *reinterpret_cast<BufferUsageFlags2CreateInfo const *>( &rhs ) )
{}
BufferUsageFlags2CreateInfo & operator=( BufferUsageFlags2CreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
BufferUsageFlags2CreateInfo & operator=( VkBufferUsageFlags2CreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferUsageFlags2CreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 BufferUsageFlags2CreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferUsageFlags2CreateInfo & setUsage( VULKAN_HPP_NAMESPACE::BufferUsageFlags2 usage_ ) VULKAN_HPP_NOEXCEPT
{
usage = usage_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkBufferUsageFlags2CreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBufferUsageFlags2CreateInfo*>( this );
}
operator VkBufferUsageFlags2CreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBufferUsageFlags2CreateInfo*>( this );
}
operator VkBufferUsageFlags2CreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkBufferUsageFlags2CreateInfo*>( this );
}
operator VkBufferUsageFlags2CreateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkBufferUsageFlags2CreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::BufferUsageFlags2 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, usage );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( BufferUsageFlags2CreateInfo const & ) const = default;
#else
bool operator==( BufferUsageFlags2CreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( usage == rhs.usage );
#endif
}
bool operator!=( BufferUsageFlags2CreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferUsageFlags2CreateInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::BufferUsageFlags2 usage = {};
};
template <>
struct CppType<StructureType, StructureType::eBufferUsageFlags2CreateInfo>
{
using Type = BufferUsageFlags2CreateInfo;
};
using BufferUsageFlags2CreateInfoKHR = BufferUsageFlags2CreateInfo;
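// Illustrative sketch, not generated code: when a BufferUsageFlags2CreateInfo is chained into
// BufferCreateInfo, its 64-bit usage mask replaces BufferCreateInfo::usage; `size` is a
// caller-provided placeholder.
//
//   vk::BufferUsageFlags2CreateInfo usage2 =
//     vk::BufferUsageFlags2CreateInfo{}.setUsage( vk::BufferUsageFlagBits2::eStorageBuffer |
//                                                 vk::BufferUsageFlagBits2::eTransferDst );
//   vk::BufferCreateInfo createInfo = vk::BufferCreateInfo{}.setSize( size ).setPNext( &usage2 );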
// wrapper struct for struct VkBufferViewCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferViewCreateInfo.html
struct BufferViewCreateInfo
{
using NativeType = VkBufferViewCreateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferViewCreateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR BufferViewCreateInfo(VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize range_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, buffer{ buffer_ }, format{ format_ }, offset{ offset_ }, range{ range_ }
{}
VULKAN_HPP_CONSTEXPR BufferViewCreateInfo( BufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BufferViewCreateInfo( VkBufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: BufferViewCreateInfo( *reinterpret_cast<BufferViewCreateInfo const *>( &rhs ) )
{}
BufferViewCreateInfo & operator=( BufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
BufferViewCreateInfo & operator=( VkBufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferViewCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
{
buffer = buffer_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
{
format = format_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
{
offset = offset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo & setRange( VULKAN_HPP_NAMESPACE::DeviceSize range_ ) VULKAN_HPP_NOEXCEPT
{
range = range_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkBufferViewCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBufferViewCreateInfo*>( this );
}
operator VkBufferViewCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBufferViewCreateInfo*>( this );
}
operator VkBufferViewCreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkBufferViewCreateInfo*>( this );
}
operator VkBufferViewCreateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkBufferViewCreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::BufferViewCreateFlags const &, VULKAN_HPP_NAMESPACE::Buffer const &, VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, buffer, format, offset, range );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( BufferViewCreateInfo const & ) const = default;
#else
bool operator==( BufferViewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( buffer == rhs.buffer )
&& ( format == rhs.format )
&& ( offset == rhs.offset )
&& ( range == rhs.range );
#endif
}
bool operator!=( BufferViewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferViewCreateInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags = {};
VULKAN_HPP_NAMESPACE::Buffer buffer = {};
VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
VULKAN_HPP_NAMESPACE::DeviceSize range = {};
};
template <>
struct CppType<StructureType, StructureType::eBufferViewCreateInfo>
{
using Type = BufferViewCreateInfo;
};
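// Illustrative sketch, not generated code: BufferViewCreateInfo describes a formatted texel view
// over a sub-range of a buffer and is consumed by Device::createBufferView; the buffer must have
// been created with a texel-buffer usage bit, and `device` / `buffer` are caller-provided.
//
//   vk::BufferView view = device.createBufferView( vk::BufferViewCreateInfo{}
//                                                    .setBuffer( buffer )
//                                                    .setFormat( vk::Format::eR32G32B32A32Sfloat )
//                                                    .setRange( VK_WHOLE_SIZE ) );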
// wrapper struct for struct VkStridedDeviceAddressNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkStridedDeviceAddressNV.html
struct StridedDeviceAddressNV
{
using NativeType = VkStridedDeviceAddressNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR StridedDeviceAddressNV(VULKAN_HPP_NAMESPACE::DeviceAddress startAddress_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize strideInBytes_ = {}) VULKAN_HPP_NOEXCEPT
: startAddress{ startAddress_ }, strideInBytes{ strideInBytes_ }
{}
VULKAN_HPP_CONSTEXPR StridedDeviceAddressNV( StridedDeviceAddressNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
StridedDeviceAddressNV( VkStridedDeviceAddressNV const & rhs ) VULKAN_HPP_NOEXCEPT
: StridedDeviceAddressNV( *reinterpret_cast<StridedDeviceAddressNV const *>( &rhs ) )
{}
StridedDeviceAddressNV & operator=( StridedDeviceAddressNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
StridedDeviceAddressNV & operator=( VkStridedDeviceAddressNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::StridedDeviceAddressNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressNV & setStartAddress( VULKAN_HPP_NAMESPACE::DeviceAddress startAddress_ ) VULKAN_HPP_NOEXCEPT
{
startAddress = startAddress_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressNV & setStrideInBytes( VULKAN_HPP_NAMESPACE::DeviceSize strideInBytes_ ) VULKAN_HPP_NOEXCEPT
{
strideInBytes = strideInBytes_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkStridedDeviceAddressNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkStridedDeviceAddressNV*>( this );
}
operator VkStridedDeviceAddressNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkStridedDeviceAddressNV*>( this );
}
operator VkStridedDeviceAddressNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkStridedDeviceAddressNV*>( this );
}
operator VkStridedDeviceAddressNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkStridedDeviceAddressNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( startAddress, strideInBytes );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( StridedDeviceAddressNV const & ) const = default;
#else
bool operator==( StridedDeviceAddressNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( startAddress == rhs.startAddress )
&& ( strideInBytes == rhs.strideInBytes );
#endif
}
bool operator!=( StridedDeviceAddressNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::DeviceAddress startAddress = {};
VULKAN_HPP_NAMESPACE::DeviceSize strideInBytes = {};
};
// wrapper struct for struct VkBuildPartitionedAccelerationStructureIndirectCommandNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBuildPartitionedAccelerationStructureIndirectCommandNV.html
struct BuildPartitionedAccelerationStructureIndirectCommandNV
{
using NativeType = VkBuildPartitionedAccelerationStructureIndirectCommandNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR BuildPartitionedAccelerationStructureIndirectCommandNV(VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureOpTypeNV opType_ = VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureOpTypeNV::eWriteInstance, uint32_t argCount_ = {}, VULKAN_HPP_NAMESPACE::StridedDeviceAddressNV argData_ = {}) VULKAN_HPP_NOEXCEPT
: opType{ opType_ }, argCount{ argCount_ }, argData{ argData_ }
{}
VULKAN_HPP_CONSTEXPR BuildPartitionedAccelerationStructureIndirectCommandNV( BuildPartitionedAccelerationStructureIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BuildPartitionedAccelerationStructureIndirectCommandNV( VkBuildPartitionedAccelerationStructureIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
: BuildPartitionedAccelerationStructureIndirectCommandNV( *reinterpret_cast<BuildPartitionedAccelerationStructureIndirectCommandNV const *>( &rhs ) )
{}
BuildPartitionedAccelerationStructureIndirectCommandNV & operator=( BuildPartitionedAccelerationStructureIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
BuildPartitionedAccelerationStructureIndirectCommandNV & operator=( VkBuildPartitionedAccelerationStructureIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BuildPartitionedAccelerationStructureIndirectCommandNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 BuildPartitionedAccelerationStructureIndirectCommandNV & setOpType( VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureOpTypeNV opType_ ) VULKAN_HPP_NOEXCEPT
{
opType = opType_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BuildPartitionedAccelerationStructureIndirectCommandNV & setArgCount( uint32_t argCount_ ) VULKAN_HPP_NOEXCEPT
{
argCount = argCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BuildPartitionedAccelerationStructureIndirectCommandNV & setArgData( VULKAN_HPP_NAMESPACE::StridedDeviceAddressNV const & argData_ ) VULKAN_HPP_NOEXCEPT
{
argData = argData_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkBuildPartitionedAccelerationStructureIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBuildPartitionedAccelerationStructureIndirectCommandNV*>( this );
}
operator VkBuildPartitionedAccelerationStructureIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBuildPartitionedAccelerationStructureIndirectCommandNV*>( this );
}
operator VkBuildPartitionedAccelerationStructureIndirectCommandNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkBuildPartitionedAccelerationStructureIndirectCommandNV*>( this );
}
operator VkBuildPartitionedAccelerationStructureIndirectCommandNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkBuildPartitionedAccelerationStructureIndirectCommandNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureOpTypeNV const &, uint32_t const &, VULKAN_HPP_NAMESPACE::StridedDeviceAddressNV const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( opType, argCount, argData );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( BuildPartitionedAccelerationStructureIndirectCommandNV const & ) const = default;
#else
bool operator==( BuildPartitionedAccelerationStructureIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( opType == rhs.opType )
&& ( argCount == rhs.argCount )
&& ( argData == rhs.argData );
#endif
}
bool operator!=( BuildPartitionedAccelerationStructureIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureOpTypeNV opType = VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureOpTypeNV::eWriteInstance;
uint32_t argCount = {};
VULKAN_HPP_NAMESPACE::StridedDeviceAddressNV argData = {};
};
// wrapper struct for struct VkPartitionedAccelerationStructureInstancesInputNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPartitionedAccelerationStructureInstancesInputNV.html
struct PartitionedAccelerationStructureInstancesInputNV
{
using NativeType = VkPartitionedAccelerationStructureInstancesInputNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePartitionedAccelerationStructureInstancesInputNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PartitionedAccelerationStructureInstancesInputNV(VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ = {}, uint32_t instanceCount_ = {}, uint32_t maxInstancePerPartitionCount_ = {}, uint32_t partitionCount_ = {}, uint32_t maxInstanceInGlobalPartitionCount_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, instanceCount{ instanceCount_ }, maxInstancePerPartitionCount{ maxInstancePerPartitionCount_ }, partitionCount{ partitionCount_ }, maxInstanceInGlobalPartitionCount{ maxInstanceInGlobalPartitionCount_ }
{}
VULKAN_HPP_CONSTEXPR PartitionedAccelerationStructureInstancesInputNV( PartitionedAccelerationStructureInstancesInputNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PartitionedAccelerationStructureInstancesInputNV( VkPartitionedAccelerationStructureInstancesInputNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PartitionedAccelerationStructureInstancesInputNV( *reinterpret_cast<PartitionedAccelerationStructureInstancesInputNV const *>( &rhs ) )
{}
PartitionedAccelerationStructureInstancesInputNV & operator=( PartitionedAccelerationStructureInstancesInputNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PartitionedAccelerationStructureInstancesInputNV & operator=( VkPartitionedAccelerationStructureInstancesInputNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureInstancesInputNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureInstancesInputNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureInstancesInputNV & setFlags( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureInstancesInputNV & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT
{
instanceCount = instanceCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureInstancesInputNV & setMaxInstancePerPartitionCount( uint32_t maxInstancePerPartitionCount_ ) VULKAN_HPP_NOEXCEPT
{
maxInstancePerPartitionCount = maxInstancePerPartitionCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureInstancesInputNV & setPartitionCount( uint32_t partitionCount_ ) VULKAN_HPP_NOEXCEPT
{
partitionCount = partitionCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureInstancesInputNV & setMaxInstanceInGlobalPartitionCount( uint32_t maxInstanceInGlobalPartitionCount_ ) VULKAN_HPP_NOEXCEPT
{
maxInstanceInGlobalPartitionCount = maxInstanceInGlobalPartitionCount_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPartitionedAccelerationStructureInstancesInputNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPartitionedAccelerationStructureInstancesInputNV*>( this );
}
operator VkPartitionedAccelerationStructureInstancesInputNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPartitionedAccelerationStructureInstancesInputNV*>( this );
}
operator VkPartitionedAccelerationStructureInstancesInputNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPartitionedAccelerationStructureInstancesInputNV*>( this );
}
operator VkPartitionedAccelerationStructureInstancesInputNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPartitionedAccelerationStructureInstancesInputNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, instanceCount, maxInstancePerPartitionCount, partitionCount, maxInstanceInGlobalPartitionCount );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PartitionedAccelerationStructureInstancesInputNV const & ) const = default;
#else
bool operator==( PartitionedAccelerationStructureInstancesInputNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( instanceCount == rhs.instanceCount )
&& ( maxInstancePerPartitionCount == rhs.maxInstancePerPartitionCount )
&& ( partitionCount == rhs.partitionCount )
&& ( maxInstanceInGlobalPartitionCount == rhs.maxInstanceInGlobalPartitionCount );
#endif
}
bool operator!=( PartitionedAccelerationStructureInstancesInputNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePartitionedAccelerationStructureInstancesInputNV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags = {};
uint32_t instanceCount = {};
uint32_t maxInstancePerPartitionCount = {};
uint32_t partitionCount = {};
uint32_t maxInstanceInGlobalPartitionCount = {};
};
template <>
struct CppType<StructureType, StructureType::ePartitionedAccelerationStructureInstancesInputNV>
{
using Type = PartitionedAccelerationStructureInstancesInputNV;
};
// wrapper struct for struct VkBuildPartitionedAccelerationStructureInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBuildPartitionedAccelerationStructureInfoNV.html
struct BuildPartitionedAccelerationStructureInfoNV
{
using NativeType = VkBuildPartitionedAccelerationStructureInfoNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBuildPartitionedAccelerationStructureInfoNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR BuildPartitionedAccelerationStructureInfoNV(VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureInstancesInputNV input_ = {}, VULKAN_HPP_NAMESPACE::DeviceAddress srcAccelerationStructureData_ = {}, VULKAN_HPP_NAMESPACE::DeviceAddress dstAccelerationStructureData_ = {}, VULKAN_HPP_NAMESPACE::DeviceAddress scratchData_ = {}, VULKAN_HPP_NAMESPACE::DeviceAddress srcInfos_ = {}, VULKAN_HPP_NAMESPACE::DeviceAddress srcInfosCount_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, input{ input_ }, srcAccelerationStructureData{ srcAccelerationStructureData_ }, dstAccelerationStructureData{ dstAccelerationStructureData_ }, scratchData{ scratchData_ }, srcInfos{ srcInfos_ }, srcInfosCount{ srcInfosCount_ }
{}
VULKAN_HPP_CONSTEXPR BuildPartitionedAccelerationStructureInfoNV( BuildPartitionedAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BuildPartitionedAccelerationStructureInfoNV( VkBuildPartitionedAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: BuildPartitionedAccelerationStructureInfoNV( *reinterpret_cast<BuildPartitionedAccelerationStructureInfoNV const *>( &rhs ) )
{}
BuildPartitionedAccelerationStructureInfoNV & operator=( BuildPartitionedAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
BuildPartitionedAccelerationStructureInfoNV & operator=( VkBuildPartitionedAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BuildPartitionedAccelerationStructureInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 BuildPartitionedAccelerationStructureInfoNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BuildPartitionedAccelerationStructureInfoNV & setInput( VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureInstancesInputNV const & input_ ) VULKAN_HPP_NOEXCEPT
{
input = input_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BuildPartitionedAccelerationStructureInfoNV & setSrcAccelerationStructureData( VULKAN_HPP_NAMESPACE::DeviceAddress srcAccelerationStructureData_ ) VULKAN_HPP_NOEXCEPT
{
srcAccelerationStructureData = srcAccelerationStructureData_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BuildPartitionedAccelerationStructureInfoNV & setDstAccelerationStructureData( VULKAN_HPP_NAMESPACE::DeviceAddress dstAccelerationStructureData_ ) VULKAN_HPP_NOEXCEPT
{
dstAccelerationStructureData = dstAccelerationStructureData_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BuildPartitionedAccelerationStructureInfoNV & setScratchData( VULKAN_HPP_NAMESPACE::DeviceAddress scratchData_ ) VULKAN_HPP_NOEXCEPT
{
scratchData = scratchData_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BuildPartitionedAccelerationStructureInfoNV & setSrcInfos( VULKAN_HPP_NAMESPACE::DeviceAddress srcInfos_ ) VULKAN_HPP_NOEXCEPT
{
srcInfos = srcInfos_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BuildPartitionedAccelerationStructureInfoNV & setSrcInfosCount( VULKAN_HPP_NAMESPACE::DeviceAddress srcInfosCount_ ) VULKAN_HPP_NOEXCEPT
{
srcInfosCount = srcInfosCount_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkBuildPartitionedAccelerationStructureInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBuildPartitionedAccelerationStructureInfoNV*>( this );
}
operator VkBuildPartitionedAccelerationStructureInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBuildPartitionedAccelerationStructureInfoNV*>( this );
}
operator VkBuildPartitionedAccelerationStructureInfoNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkBuildPartitionedAccelerationStructureInfoNV*>( this );
}
operator VkBuildPartitionedAccelerationStructureInfoNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkBuildPartitionedAccelerationStructureInfoNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureInstancesInputNV const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, input, srcAccelerationStructureData, dstAccelerationStructureData, scratchData, srcInfos, srcInfosCount );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( BuildPartitionedAccelerationStructureInfoNV const & ) const = default;
#else
bool operator==( BuildPartitionedAccelerationStructureInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( input == rhs.input )
&& ( srcAccelerationStructureData == rhs.srcAccelerationStructureData )
&& ( dstAccelerationStructureData == rhs.dstAccelerationStructureData )
&& ( scratchData == rhs.scratchData )
&& ( srcInfos == rhs.srcInfos )
&& ( srcInfosCount == rhs.srcInfosCount );
#endif
}
bool operator!=( BuildPartitionedAccelerationStructureInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBuildPartitionedAccelerationStructureInfoNV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureInstancesInputNV input = {};
VULKAN_HPP_NAMESPACE::DeviceAddress srcAccelerationStructureData = {};
VULKAN_HPP_NAMESPACE::DeviceAddress dstAccelerationStructureData = {};
VULKAN_HPP_NAMESPACE::DeviceAddress scratchData = {};
VULKAN_HPP_NAMESPACE::DeviceAddress srcInfos = {};
VULKAN_HPP_NAMESPACE::DeviceAddress srcInfosCount = {};
};
template <>
struct CppType<StructureType, StructureType::eBuildPartitionedAccelerationStructureInfoNV>
{
using Type = BuildPartitionedAccelerationStructureInfoNV;
};
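// Illustrative sketch, not generated code: the three structs above cooperate in
// VK_NV_partitioned_acceleration_structure builds. The input struct sizes the build, srcInfos is
// a device address of an array of BuildPartitionedAccelerationStructureIndirectCommandNV, and
// srcInfosCount is a device address holding the command count; all addresses and counts below
// are caller-provided placeholders.
//
//   vk::BuildPartitionedAccelerationStructureInfoNV buildInfo =
//     vk::BuildPartitionedAccelerationStructureInfoNV{}
//       .setInput( vk::PartitionedAccelerationStructureInstancesInputNV{}
//                    .setInstanceCount( instanceCount )
//                    .setPartitionCount( partitionCount ) )
//       .setDstAccelerationStructureData( dstAddress )
//       .setScratchData( scratchAddress )
//       .setSrcInfos( srcInfosAddress )
//       .setSrcInfosCount( srcInfosCountAddress );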
// wrapper struct for struct VkCalibratedTimestampInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCalibratedTimestampInfoKHR.html
struct CalibratedTimestampInfoKHR
{
using NativeType = VkCalibratedTimestampInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCalibratedTimestampInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR CalibratedTimestampInfoKHR(VULKAN_HPP_NAMESPACE::TimeDomainKHR timeDomain_ = VULKAN_HPP_NAMESPACE::TimeDomainKHR::eDevice, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, timeDomain{ timeDomain_ }
{}
VULKAN_HPP_CONSTEXPR CalibratedTimestampInfoKHR( CalibratedTimestampInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CalibratedTimestampInfoKHR( VkCalibratedTimestampInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: CalibratedTimestampInfoKHR( *reinterpret_cast<CalibratedTimestampInfoKHR const *>( &rhs ) )
{}
CalibratedTimestampInfoKHR & operator=( CalibratedTimestampInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
CalibratedTimestampInfoKHR & operator=( VkCalibratedTimestampInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 CalibratedTimestampInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CalibratedTimestampInfoKHR & setTimeDomain( VULKAN_HPP_NAMESPACE::TimeDomainKHR timeDomain_ ) VULKAN_HPP_NOEXCEPT
{
timeDomain = timeDomain_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkCalibratedTimestampInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCalibratedTimestampInfoKHR*>( this );
}
operator VkCalibratedTimestampInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCalibratedTimestampInfoKHR*>( this );
}
operator VkCalibratedTimestampInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkCalibratedTimestampInfoKHR*>( this );
}
operator VkCalibratedTimestampInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkCalibratedTimestampInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::TimeDomainKHR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, timeDomain );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( CalibratedTimestampInfoKHR const & ) const = default;
#else
bool operator==( CalibratedTimestampInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( timeDomain == rhs.timeDomain );
#endif
}
bool operator!=( CalibratedTimestampInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCalibratedTimestampInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::TimeDomainKHR timeDomain = VULKAN_HPP_NAMESPACE::TimeDomainKHR::eDevice;
};
template <>
struct CppType<StructureType, StructureType::eCalibratedTimestampInfoKHR>
{
using Type = CalibratedTimestampInfoKHR;
};
using CalibratedTimestampInfoEXT = CalibratedTimestampInfoKHR;
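// Illustrative sketch, not generated code: CalibratedTimestampInfoKHR selects the time domains
// sampled together by Device::getCalibratedTimestampsKHR; in the exception-based configuration
// the call returns the timestamps alongside the maximum deviation in nanoseconds. `device` is
// caller-provided.
//
//   std::array<vk::CalibratedTimestampInfoKHR, 2> infos = {
//     vk::CalibratedTimestampInfoKHR{}.setTimeDomain( vk::TimeDomainKHR::eDevice ),
//     vk::CalibratedTimestampInfoKHR{}.setTimeDomain( vk::TimeDomainKHR::eClockMonotonic ) };
//   auto [ timestamps, maxDeviation ] = device.getCalibratedTimestampsKHR( infos );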
// wrapper struct for struct VkCheckpointData2NV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCheckpointData2NV.html
struct CheckpointData2NV
{
using NativeType = VkCheckpointData2NV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCheckpointData2NV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR CheckpointData2NV(VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage_ = {}, void * pCheckpointMarker_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, stage{ stage_ }, pCheckpointMarker{ pCheckpointMarker_ }
{}
VULKAN_HPP_CONSTEXPR CheckpointData2NV( CheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CheckpointData2NV( VkCheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT
: CheckpointData2NV( *reinterpret_cast<CheckpointData2NV const *>( &rhs ) )
{}
CheckpointData2NV & operator=( CheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
CheckpointData2NV & operator=( VkCheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CheckpointData2NV const *>( &rhs );
return *this;
}
operator VkCheckpointData2NV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCheckpointData2NV*>( this );
}
operator VkCheckpointData2NV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCheckpointData2NV*>( this );
}
operator VkCheckpointData2NV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkCheckpointData2NV*>( this );
}
operator VkCheckpointData2NV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkCheckpointData2NV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 const &, void * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, stage, pCheckpointMarker );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( CheckpointData2NV const & ) const = default;
#else
bool operator==( CheckpointData2NV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( stage == rhs.stage )
&& ( pCheckpointMarker == rhs.pCheckpointMarker );
#endif
}
bool operator!=( CheckpointData2NV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCheckpointData2NV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage = {};
void * pCheckpointMarker = {};
};
template <>
struct CppType<StructureType, StructureType::eCheckpointData2NV>
{
using Type = CheckpointData2NV;
};
// wrapper struct for struct VkCheckpointDataNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCheckpointDataNV.html
struct CheckpointDataNV
{
using NativeType = VkCheckpointDataNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCheckpointDataNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR CheckpointDataNV(VULKAN_HPP_NAMESPACE::PipelineStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::PipelineStageFlagBits::eTopOfPipe, void * pCheckpointMarker_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, stage{ stage_ }, pCheckpointMarker{ pCheckpointMarker_ }
{}
VULKAN_HPP_CONSTEXPR CheckpointDataNV( CheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CheckpointDataNV( VkCheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
: CheckpointDataNV( *reinterpret_cast<CheckpointDataNV const *>( &rhs ) )
{}
CheckpointDataNV & operator=( CheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
CheckpointDataNV & operator=( VkCheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CheckpointDataNV const *>( &rhs );
return *this;
}
operator VkCheckpointDataNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCheckpointDataNV*>( this );
}
operator VkCheckpointDataNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCheckpointDataNV*>( this );
}
operator VkCheckpointDataNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkCheckpointDataNV*>( this );
}
operator VkCheckpointDataNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkCheckpointDataNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PipelineStageFlagBits const &, void * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, stage, pCheckpointMarker );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( CheckpointDataNV const & ) const = default;
#else
bool operator==( CheckpointDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( stage == rhs.stage )
&& ( pCheckpointMarker == rhs.pCheckpointMarker );
#endif
}
bool operator!=( CheckpointDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCheckpointDataNV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::PipelineStageFlagBits stage = VULKAN_HPP_NAMESPACE::PipelineStageFlagBits::eTopOfPipe;
void * pCheckpointMarker = {};
};
template <>
struct CppType<StructureType, StructureType::eCheckpointDataNV>
{
using Type = CheckpointDataNV;
};
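// Illustrative sketch, not generated code: checkpoint data is read back after a device loss when
// VK_NV_device_diagnostic_checkpoints is enabled; markers are recorded with
// CommandBuffer::setCheckpointNV and the last ones reached are queried per queue (the
// synchronization2 variant returns CheckpointData2NV via Queue::getCheckpointData2NV).
// `commandBuffer` and `queue` are caller-provided.
//
//   commandBuffer.setCheckpointNV( reinterpret_cast<const void *>( uintptr_t( 42 ) ) );
//   // ... after a queue submission fails with vk::Result::eErrorDeviceLost:
//   std::vector<vk::CheckpointDataNV> checkpoints = queue.getCheckpointDataNV();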
union ClearColorValue
{
using NativeType = VkClearColorValue;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 ClearColorValue( const std::array<float,4>& float32_ = {} )
: float32( float32_ )
{}
VULKAN_HPP_CONSTEXPR ClearColorValue( float float32_0, float float32_1, float float32_2, float float32_3 )
: float32{ { { float32_0, float32_1, float32_2, float32_3 } } }
{}
VULKAN_HPP_CONSTEXPR_14 ClearColorValue( const std::array<int32_t,4>& int32_ )
: int32( int32_ )
{}
VULKAN_HPP_CONSTEXPR ClearColorValue( int32_t int32_0, int32_t int32_1, int32_t int32_2, int32_t int32_3 )
: int32{ { { int32_0, int32_1, int32_2, int32_3 } } }
{}
VULKAN_HPP_CONSTEXPR_14 ClearColorValue( const std::array<uint32_t,4>& uint32_ )
: uint32( uint32_ )
{}
VULKAN_HPP_CONSTEXPR ClearColorValue( uint32_t uint32_0, uint32_t uint32_1, uint32_t uint32_2, uint32_t uint32_3 )
: uint32{ { { uint32_0, uint32_1, uint32_2, uint32_3 } } }
{}
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ClearColorValue & setFloat32( std::array<float,4> float32_ ) VULKAN_HPP_NOEXCEPT
{
float32 = float32_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClearColorValue & setInt32( std::array<int32_t,4> int32_ ) VULKAN_HPP_NOEXCEPT
{
int32 = int32_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClearColorValue & setUint32( std::array<uint32_t,4> uint32_ ) VULKAN_HPP_NOEXCEPT
{
uint32 = uint32_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkClearColorValue const &() const
{
return *reinterpret_cast<const VkClearColorValue*>( this );
}
operator VkClearColorValue &()
{
return *reinterpret_cast<VkClearColorValue*>( this );
}
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> float32;
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<int32_t, 4> int32;
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 4> uint32;
};
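// Illustrative sketch, not generated code: ClearColorValue is a union, so exactly one of the
// three arrays is meaningful at a time; which member the implementation reads is determined by
// the numeric format of the image being cleared.
//
//   vk::ClearColorValue opaqueBlack( std::array<float, 4>{ 0.0f, 0.0f, 0.0f, 1.0f } );
//   vk::ClearColorValue zeroIds = vk::ClearColorValue{}.setUint32( { 0, 0, 0, 0 } );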
// wrapper struct for struct VkClearDepthStencilValue, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkClearDepthStencilValue.html
struct ClearDepthStencilValue
{
using NativeType = VkClearDepthStencilValue;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ClearDepthStencilValue(float depth_ = {}, uint32_t stencil_ = {}) VULKAN_HPP_NOEXCEPT
: depth{ depth_ }, stencil{ stencil_ }
{}
VULKAN_HPP_CONSTEXPR ClearDepthStencilValue( ClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ClearDepthStencilValue( VkClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT
: ClearDepthStencilValue( *reinterpret_cast<ClearDepthStencilValue const *>( &rhs ) )
{}
ClearDepthStencilValue & operator=( ClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ClearDepthStencilValue & operator=( VkClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ClearDepthStencilValue const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ClearDepthStencilValue & setDepth( float depth_ ) VULKAN_HPP_NOEXCEPT
{
depth = depth_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClearDepthStencilValue & setStencil( uint32_t stencil_ ) VULKAN_HPP_NOEXCEPT
{
stencil = stencil_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkClearDepthStencilValue const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkClearDepthStencilValue*>( this );
}
operator VkClearDepthStencilValue &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkClearDepthStencilValue*>( this );
}
operator VkClearDepthStencilValue const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkClearDepthStencilValue*>( this );
}
operator VkClearDepthStencilValue *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkClearDepthStencilValue*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<float const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( depth, stencil );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ClearDepthStencilValue const & ) const = default;
#else
bool operator==( ClearDepthStencilValue const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( depth == rhs.depth )
&& ( stencil == rhs.stencil );
#endif
}
bool operator!=( ClearDepthStencilValue const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
float depth = {};
uint32_t stencil = {};
};
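// Usage sketch (illustrative comment, not part of the generated API): depth is commonly cleared to
// the far-plane value 1.0 (and must stay within [0, 1] unless unrestricted depth range is enabled);
// the 32-bit stencil value is converted to the attachment's stencil format by taking its low bits.
//   auto dsClear = VULKAN_HPP_NAMESPACE::ClearDepthStencilValue{}.setDepth( 1.0f ).setStencil( 0 );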
union ClearValue
{
using NativeType = VkClearValue;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 ClearValue( VULKAN_HPP_NAMESPACE::ClearColorValue color_ = {} )
: color( color_ )
{}
VULKAN_HPP_CONSTEXPR_14 ClearValue( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue depthStencil_ )
: depthStencil( depthStencil_ )
{}
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ClearValue & setColor( VULKAN_HPP_NAMESPACE::ClearColorValue const & color_ ) VULKAN_HPP_NOEXCEPT
{
color = color_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClearValue & setDepthStencil( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue const & depthStencil_ ) VULKAN_HPP_NOEXCEPT
{
depthStencil = depthStencil_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkClearValue const &() const
{
return *reinterpret_cast<const VkClearValue*>( this );
}
operator VkClearValue &()
{
return *reinterpret_cast<VkClearValue*>( this );
}
#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
VULKAN_HPP_NAMESPACE::ClearColorValue color;
VULKAN_HPP_NAMESPACE::ClearDepthStencilValue depthStencil;
#else
VkClearColorValue color;
VkClearDepthStencilValue depthStencil;
#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
};
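// Usage sketch (illustrative comment, not part of the generated API): color and depthStencil
// occupy the same storage, so initialize the member that matches the attachment being cleared.
//   VULKAN_HPP_NAMESPACE::ClearValue clears[2];
//   clears[0] = VULKAN_HPP_NAMESPACE::ClearValue{}.setColor( VULKAN_HPP_NAMESPACE::ClearColorValue{}.setFloat32( { 0.0f, 0.0f, 0.0f, 1.0f } ) );
//   clears[1] = VULKAN_HPP_NAMESPACE::ClearValue{}.setDepthStencil( { 1.0f, 0 } );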
// wrapper struct for struct VkClearAttachment, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkClearAttachment.html
struct ClearAttachment
{
using NativeType = VkClearAttachment;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 ClearAttachment(VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, uint32_t colorAttachment_ = {}, VULKAN_HPP_NAMESPACE::ClearValue clearValue_ = {}) VULKAN_HPP_NOEXCEPT
: aspectMask{ aspectMask_ }, colorAttachment{ colorAttachment_ }, clearValue{ clearValue_ }
{}
VULKAN_HPP_CONSTEXPR_14 ClearAttachment( ClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ClearAttachment( VkClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT
: ClearAttachment( *reinterpret_cast<ClearAttachment const *>( &rhs ) )
{}
ClearAttachment & operator=( ClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ClearAttachment & operator=( VkClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ClearAttachment const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ClearAttachment & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
{
aspectMask = aspectMask_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClearAttachment & setColorAttachment( uint32_t colorAttachment_ ) VULKAN_HPP_NOEXCEPT
{
colorAttachment = colorAttachment_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClearAttachment & setClearValue( VULKAN_HPP_NAMESPACE::ClearValue const & clearValue_ ) VULKAN_HPP_NOEXCEPT
{
clearValue = clearValue_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkClearAttachment const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkClearAttachment*>( this );
}
operator VkClearAttachment &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkClearAttachment*>( this );
}
operator VkClearAttachment const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkClearAttachment*>( this );
}
operator VkClearAttachment *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkClearAttachment*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::ImageAspectFlags const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ClearValue const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( aspectMask, colorAttachment, clearValue );
}
#endif
public:
VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
uint32_t colorAttachment = {};
VULKAN_HPP_NAMESPACE::ClearValue clearValue = {};
};
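// Usage sketch (illustrative comment, not part of the generated API): colorAttachment is only
// consulted when aspectMask contains eColor; for depth and/or stencil clears it is ignored.
//   auto clearDepth = VULKAN_HPP_NAMESPACE::ClearAttachment{}
//                       .setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eDepth )
//                       .setClearValue( VULKAN_HPP_NAMESPACE::ClearValue{}.setDepthStencil( { 1.0f, 0 } ) );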
// wrapper struct for struct VkClearRect, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkClearRect.html
struct ClearRect
{
using NativeType = VkClearRect;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ClearRect(VULKAN_HPP_NAMESPACE::Rect2D rect_ = {}, uint32_t baseArrayLayer_ = {}, uint32_t layerCount_ = {}) VULKAN_HPP_NOEXCEPT
: rect{ rect_ }, baseArrayLayer{ baseArrayLayer_ }, layerCount{ layerCount_ }
{}
VULKAN_HPP_CONSTEXPR ClearRect( ClearRect const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ClearRect( VkClearRect const & rhs ) VULKAN_HPP_NOEXCEPT
: ClearRect( *reinterpret_cast<ClearRect const *>( &rhs ) )
{}
ClearRect & operator=( ClearRect const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ClearRect & operator=( VkClearRect const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ClearRect const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ClearRect & setRect( VULKAN_HPP_NAMESPACE::Rect2D const & rect_ ) VULKAN_HPP_NOEXCEPT
{
rect = rect_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClearRect & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT
{
baseArrayLayer = baseArrayLayer_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClearRect & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
{
layerCount = layerCount_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkClearRect const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkClearRect*>( this );
}
operator VkClearRect &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkClearRect*>( this );
}
operator VkClearRect const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkClearRect*>( this );
}
operator VkClearRect *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkClearRect*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::Rect2D const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( rect, baseArrayLayer, layerCount );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ClearRect const & ) const = default;
#else
bool operator==( ClearRect const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( rect == rhs.rect )
&& ( baseArrayLayer == rhs.baseArrayLayer )
&& ( layerCount == rhs.layerCount );
#endif
}
bool operator!=( ClearRect const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::Rect2D rect = {};
uint32_t baseArrayLayer = {};
uint32_t layerCount = {};
};
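// Usage sketch (illustrative comment, not part of the generated API; `cmd`, `width` and `height`
// are hypothetical): a ClearRect pairs with the ClearAttachment sketch above in
// CommandBuffer::clearAttachments, which must be recorded inside a render pass instance.
//   auto rect = VULKAN_HPP_NAMESPACE::ClearRect{}
//                 .setRect( { { 0, 0 }, { width, height } } )
//                 .setBaseArrayLayer( 0 )
//                 .setLayerCount( 1 );
//   cmd.clearAttachments( clearDepth, rect );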
// wrapper struct for struct VkClusterAccelerationStructureBuildClustersBottomLevelInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkClusterAccelerationStructureBuildClustersBottomLevelInfoNV.html
struct ClusterAccelerationStructureBuildClustersBottomLevelInfoNV
{
using NativeType = VkClusterAccelerationStructureBuildClustersBottomLevelInfoNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureBuildClustersBottomLevelInfoNV(uint32_t clusterReferencesCount_ = {}, uint32_t clusterReferencesStride_ = {}, VULKAN_HPP_NAMESPACE::DeviceAddress clusterReferences_ = {}) VULKAN_HPP_NOEXCEPT
: clusterReferencesCount{ clusterReferencesCount_ }, clusterReferencesStride{ clusterReferencesStride_ }, clusterReferences{ clusterReferences_ }
{}
VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureBuildClustersBottomLevelInfoNV( ClusterAccelerationStructureBuildClustersBottomLevelInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ClusterAccelerationStructureBuildClustersBottomLevelInfoNV( VkClusterAccelerationStructureBuildClustersBottomLevelInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: ClusterAccelerationStructureBuildClustersBottomLevelInfoNV( *reinterpret_cast<ClusterAccelerationStructureBuildClustersBottomLevelInfoNV const *>( &rhs ) )
{}
ClusterAccelerationStructureBuildClustersBottomLevelInfoNV & operator=( ClusterAccelerationStructureBuildClustersBottomLevelInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ClusterAccelerationStructureBuildClustersBottomLevelInfoNV & operator=( VkClusterAccelerationStructureBuildClustersBottomLevelInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureBuildClustersBottomLevelInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildClustersBottomLevelInfoNV & setClusterReferencesCount( uint32_t clusterReferencesCount_ ) VULKAN_HPP_NOEXCEPT
{
clusterReferencesCount = clusterReferencesCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildClustersBottomLevelInfoNV & setClusterReferencesStride( uint32_t clusterReferencesStride_ ) VULKAN_HPP_NOEXCEPT
{
clusterReferencesStride = clusterReferencesStride_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildClustersBottomLevelInfoNV & setClusterReferences( VULKAN_HPP_NAMESPACE::DeviceAddress clusterReferences_ ) VULKAN_HPP_NOEXCEPT
{
clusterReferences = clusterReferences_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkClusterAccelerationStructureBuildClustersBottomLevelInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkClusterAccelerationStructureBuildClustersBottomLevelInfoNV*>( this );
}
operator VkClusterAccelerationStructureBuildClustersBottomLevelInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkClusterAccelerationStructureBuildClustersBottomLevelInfoNV*>( this );
}
operator VkClusterAccelerationStructureBuildClustersBottomLevelInfoNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkClusterAccelerationStructureBuildClustersBottomLevelInfoNV*>( this );
}
operator VkClusterAccelerationStructureBuildClustersBottomLevelInfoNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkClusterAccelerationStructureBuildClustersBottomLevelInfoNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( clusterReferencesCount, clusterReferencesStride, clusterReferences );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ClusterAccelerationStructureBuildClustersBottomLevelInfoNV const & ) const = default;
#else
bool operator==( ClusterAccelerationStructureBuildClustersBottomLevelInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( clusterReferencesCount == rhs.clusterReferencesCount )
&& ( clusterReferencesStride == rhs.clusterReferencesStride )
&& ( clusterReferences == rhs.clusterReferences );
#endif
}
bool operator!=( ClusterAccelerationStructureBuildClustersBottomLevelInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
uint32_t clusterReferencesCount = {};
uint32_t clusterReferencesStride = {};
VULKAN_HPP_NAMESPACE::DeviceAddress clusterReferences = {};
};
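// Usage sketch (illustrative comment, not part of the generated API; `clusterCount` and
// `clusterRefsAddress` are hypothetical): describes an array of cluster (CLAS) references that a
// bottom-level build reads from device memory; a tightly packed address array would use a stride
// of sizeof( VULKAN_HPP_NAMESPACE::DeviceAddress ).
//   auto blasArgs = VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureBuildClustersBottomLevelInfoNV{}
//                     .setClusterReferencesCount( clusterCount )
//                     .setClusterReferencesStride( sizeof( VULKAN_HPP_NAMESPACE::DeviceAddress ) )
//                     .setClusterReferences( clusterRefsAddress );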
// wrapper struct for struct VkClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV.html
struct ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV
{
using NativeType = VkClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV(uint32_t geometryIndex_ = {}, uint32_t reserved_ = {}, uint32_t geometryFlags_ = {}) VULKAN_HPP_NOEXCEPT
: geometryIndex{ geometryIndex_ }, reserved{ reserved_ }, geometryFlags{ geometryFlags_ }
{}
VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV( ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV( VkClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const & rhs ) VULKAN_HPP_NOEXCEPT
: ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV( *reinterpret_cast<ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const *>( &rhs ) )
{}
ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV & operator=( ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV & operator=( VkClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV & setGeometryIndex( uint32_t geometryIndex_ ) VULKAN_HPP_NOEXCEPT
{
geometryIndex = geometryIndex_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV & setReserved( uint32_t reserved_ ) VULKAN_HPP_NOEXCEPT
{
reserved = reserved_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV & setGeometryFlags( uint32_t geometryFlags_ ) VULKAN_HPP_NOEXCEPT
{
geometryFlags = geometryFlags_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV*>( this );
}
operator VkClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV*>( this );
}
operator VkClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV*>( this );
}
operator VkClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<uint32_t const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( geometryIndex, reserved, geometryFlags );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const & ) const = default;
#else
bool operator==( ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( geometryIndex == rhs.geometryIndex )
&& ( reserved == rhs.reserved )
&& ( geometryFlags == rhs.geometryFlags );
#endif
}
bool operator!=( ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
uint32_t geometryIndex : 24;
uint32_t reserved : 5;
uint32_t geometryFlags : 3;
};
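// Note (illustrative comment, not part of the generated API): this wrapper mirrors the packed
// 32-bit layout of the native struct (24 bits of geometry index, 5 reserved bits, 3 flag bits),
// so values written through the setters are truncated to their field widths.
//   auto gi = VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV{}
//               .setGeometryIndex( 7 );  // must fit in 24 bits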
// wrapper struct for struct VkClusterAccelerationStructureBuildTriangleClusterInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkClusterAccelerationStructureBuildTriangleClusterInfoNV.html
struct ClusterAccelerationStructureBuildTriangleClusterInfoNV
{
using NativeType = VkClusterAccelerationStructureBuildTriangleClusterInfoNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureBuildTriangleClusterInfoNV(uint32_t clusterID_ = {}, VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureClusterFlagsNV clusterFlags_ = {}, uint32_t triangleCount_ = {}, uint32_t vertexCount_ = {}, uint32_t positionTruncateBitCount_ = {}, uint32_t indexType_ = {}, uint32_t opacityMicromapIndexType_ = {}, VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV baseGeometryIndexAndGeometryFlags_ = {}, uint16_t indexBufferStride_ = {}, uint16_t vertexBufferStride_ = {}, uint16_t geometryIndexAndFlagsBufferStride_ = {}, uint16_t opacityMicromapIndexBufferStride_ = {}, VULKAN_HPP_NAMESPACE::DeviceAddress indexBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceAddress vertexBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceAddress geometryIndexAndFlagsBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceAddress opacityMicromapArray_ = {}, VULKAN_HPP_NAMESPACE::DeviceAddress opacityMicromapIndexBuffer_ = {}) VULKAN_HPP_NOEXCEPT
: clusterID{ clusterID_ }, clusterFlags{ clusterFlags_ }, triangleCount{ triangleCount_ }, vertexCount{ vertexCount_ }, positionTruncateBitCount{ positionTruncateBitCount_ }, indexType{ indexType_ }, opacityMicromapIndexType{ opacityMicromapIndexType_ }, baseGeometryIndexAndGeometryFlags{ baseGeometryIndexAndGeometryFlags_ }, indexBufferStride{ indexBufferStride_ }, vertexBufferStride{ vertexBufferStride_ }, geometryIndexAndFlagsBufferStride{ geometryIndexAndFlagsBufferStride_ }, opacityMicromapIndexBufferStride{ opacityMicromapIndexBufferStride_ }, indexBuffer{ indexBuffer_ }, vertexBuffer{ vertexBuffer_ }, geometryIndexAndFlagsBuffer{ geometryIndexAndFlagsBuffer_ }, opacityMicromapArray{ opacityMicromapArray_ }, opacityMicromapIndexBuffer{ opacityMicromapIndexBuffer_ }
{}
VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureBuildTriangleClusterInfoNV( ClusterAccelerationStructureBuildTriangleClusterInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ClusterAccelerationStructureBuildTriangleClusterInfoNV( VkClusterAccelerationStructureBuildTriangleClusterInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: ClusterAccelerationStructureBuildTriangleClusterInfoNV( *reinterpret_cast<ClusterAccelerationStructureBuildTriangleClusterInfoNV const *>( &rhs ) )
{}
ClusterAccelerationStructureBuildTriangleClusterInfoNV & operator=( ClusterAccelerationStructureBuildTriangleClusterInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ClusterAccelerationStructureBuildTriangleClusterInfoNV & operator=( VkClusterAccelerationStructureBuildTriangleClusterInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureBuildTriangleClusterInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & setClusterID( uint32_t clusterID_ ) VULKAN_HPP_NOEXCEPT
{
clusterID = clusterID_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & setClusterFlags( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureClusterFlagsNV clusterFlags_ ) VULKAN_HPP_NOEXCEPT
{
clusterFlags = clusterFlags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & setTriangleCount( uint32_t triangleCount_ ) VULKAN_HPP_NOEXCEPT
{
triangleCount = triangleCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & setVertexCount( uint32_t vertexCount_ ) VULKAN_HPP_NOEXCEPT
{
vertexCount = vertexCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & setPositionTruncateBitCount( uint32_t positionTruncateBitCount_ ) VULKAN_HPP_NOEXCEPT
{
positionTruncateBitCount = positionTruncateBitCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & setIndexType( uint32_t indexType_ ) VULKAN_HPP_NOEXCEPT
{
indexType = indexType_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & setOpacityMicromapIndexType( uint32_t opacityMicromapIndexType_ ) VULKAN_HPP_NOEXCEPT
{
opacityMicromapIndexType = opacityMicromapIndexType_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & setBaseGeometryIndexAndGeometryFlags( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const & baseGeometryIndexAndGeometryFlags_ ) VULKAN_HPP_NOEXCEPT
{
baseGeometryIndexAndGeometryFlags = baseGeometryIndexAndGeometryFlags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & setIndexBufferStride( uint16_t indexBufferStride_ ) VULKAN_HPP_NOEXCEPT
{
indexBufferStride = indexBufferStride_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & setVertexBufferStride( uint16_t vertexBufferStride_ ) VULKAN_HPP_NOEXCEPT
{
vertexBufferStride = vertexBufferStride_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & setGeometryIndexAndFlagsBufferStride( uint16_t geometryIndexAndFlagsBufferStride_ ) VULKAN_HPP_NOEXCEPT
{
geometryIndexAndFlagsBufferStride = geometryIndexAndFlagsBufferStride_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & setOpacityMicromapIndexBufferStride( uint16_t opacityMicromapIndexBufferStride_ ) VULKAN_HPP_NOEXCEPT
{
opacityMicromapIndexBufferStride = opacityMicromapIndexBufferStride_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & setIndexBuffer( VULKAN_HPP_NAMESPACE::DeviceAddress indexBuffer_ ) VULKAN_HPP_NOEXCEPT
{
indexBuffer = indexBuffer_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & setVertexBuffer( VULKAN_HPP_NAMESPACE::DeviceAddress vertexBuffer_ ) VULKAN_HPP_NOEXCEPT
{
vertexBuffer = vertexBuffer_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & setGeometryIndexAndFlagsBuffer( VULKAN_HPP_NAMESPACE::DeviceAddress geometryIndexAndFlagsBuffer_ ) VULKAN_HPP_NOEXCEPT
{
geometryIndexAndFlagsBuffer = geometryIndexAndFlagsBuffer_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & setOpacityMicromapArray( VULKAN_HPP_NAMESPACE::DeviceAddress opacityMicromapArray_ ) VULKAN_HPP_NOEXCEPT
{
opacityMicromapArray = opacityMicromapArray_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & setOpacityMicromapIndexBuffer( VULKAN_HPP_NAMESPACE::DeviceAddress opacityMicromapIndexBuffer_ ) VULKAN_HPP_NOEXCEPT
{
opacityMicromapIndexBuffer = opacityMicromapIndexBuffer_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkClusterAccelerationStructureBuildTriangleClusterInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkClusterAccelerationStructureBuildTriangleClusterInfoNV*>( this );
}
operator VkClusterAccelerationStructureBuildTriangleClusterInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkClusterAccelerationStructureBuildTriangleClusterInfoNV*>( this );
}
operator VkClusterAccelerationStructureBuildTriangleClusterInfoNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkClusterAccelerationStructureBuildTriangleClusterInfoNV*>( this );
}
operator VkClusterAccelerationStructureBuildTriangleClusterInfoNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkClusterAccelerationStructureBuildTriangleClusterInfoNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<uint32_t const &, VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureClusterFlagsNV const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const &, uint16_t const &, uint16_t const &, uint16_t const &, uint16_t const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( clusterID, clusterFlags, triangleCount, vertexCount, positionTruncateBitCount, indexType, opacityMicromapIndexType, baseGeometryIndexAndGeometryFlags, indexBufferStride, vertexBufferStride, geometryIndexAndFlagsBufferStride, opacityMicromapIndexBufferStride, indexBuffer, vertexBuffer, geometryIndexAndFlagsBuffer, opacityMicromapArray, opacityMicromapIndexBuffer );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ClusterAccelerationStructureBuildTriangleClusterInfoNV const & ) const = default;
#else
bool operator==( ClusterAccelerationStructureBuildTriangleClusterInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( clusterID == rhs.clusterID )
&& ( clusterFlags == rhs.clusterFlags )
&& ( triangleCount == rhs.triangleCount )
&& ( vertexCount == rhs.vertexCount )
&& ( positionTruncateBitCount == rhs.positionTruncateBitCount )
&& ( indexType == rhs.indexType )
&& ( opacityMicromapIndexType == rhs.opacityMicromapIndexType )
&& ( baseGeometryIndexAndGeometryFlags == rhs.baseGeometryIndexAndGeometryFlags )
&& ( indexBufferStride == rhs.indexBufferStride )
&& ( vertexBufferStride == rhs.vertexBufferStride )
&& ( geometryIndexAndFlagsBufferStride == rhs.geometryIndexAndFlagsBufferStride )
&& ( opacityMicromapIndexBufferStride == rhs.opacityMicromapIndexBufferStride )
&& ( indexBuffer == rhs.indexBuffer )
&& ( vertexBuffer == rhs.vertexBuffer )
&& ( geometryIndexAndFlagsBuffer == rhs.geometryIndexAndFlagsBuffer )
&& ( opacityMicromapArray == rhs.opacityMicromapArray )
&& ( opacityMicromapIndexBuffer == rhs.opacityMicromapIndexBuffer );
#endif
}
bool operator!=( ClusterAccelerationStructureBuildTriangleClusterInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
uint32_t clusterID = {};
VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureClusterFlagsNV clusterFlags = {};
uint32_t triangleCount : 9;
uint32_t vertexCount : 9;
uint32_t positionTruncateBitCount : 6;
uint32_t indexType : 4;
uint32_t opacityMicromapIndexType : 4;
VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV baseGeometryIndexAndGeometryFlags = {};
uint16_t indexBufferStride = {};
uint16_t vertexBufferStride = {};
uint16_t geometryIndexAndFlagsBufferStride = {};
uint16_t opacityMicromapIndexBufferStride = {};
VULKAN_HPP_NAMESPACE::DeviceAddress indexBuffer = {};
VULKAN_HPP_NAMESPACE::DeviceAddress vertexBuffer = {};
VULKAN_HPP_NAMESPACE::DeviceAddress geometryIndexAndFlagsBuffer = {};
VULKAN_HPP_NAMESPACE::DeviceAddress opacityMicromapArray = {};
VULKAN_HPP_NAMESPACE::DeviceAddress opacityMicromapIndexBuffer = {};
};
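// Usage sketch (illustrative comment, not part of the generated API; the buffer addresses are
// hypothetical): the bit-field widths above bound the per-cluster counts, so triangleCount and
// vertexCount must each fit in 9 bits.
//   auto cluster = VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureBuildTriangleClusterInfoNV{}
//                    .setClusterID( 0 )
//                    .setTriangleCount( 124 )
//                    .setVertexCount( 64 )
//                    .setVertexBufferStride( 12 )  // three tightly packed floats
//                    .setVertexBuffer( vertexBufferAddress )
//                    .setIndexBuffer( indexBufferAddress );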
// wrapper struct for struct VkClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV.html
struct ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV
{
using NativeType = VkClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV(uint32_t clusterID_ = {}, VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureClusterFlagsNV clusterFlags_ = {}, uint32_t triangleCount_ = {}, uint32_t vertexCount_ = {}, uint32_t positionTruncateBitCount_ = {}, uint32_t indexType_ = {}, uint32_t opacityMicromapIndexType_ = {}, VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV baseGeometryIndexAndGeometryFlags_ = {}, uint16_t indexBufferStride_ = {}, uint16_t vertexBufferStride_ = {}, uint16_t geometryIndexAndFlagsBufferStride_ = {}, uint16_t opacityMicromapIndexBufferStride_ = {}, VULKAN_HPP_NAMESPACE::DeviceAddress indexBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceAddress vertexBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceAddress geometryIndexAndFlagsBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceAddress opacityMicromapArray_ = {}, VULKAN_HPP_NAMESPACE::DeviceAddress opacityMicromapIndexBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceAddress instantiationBoundingBoxLimit_ = {}) VULKAN_HPP_NOEXCEPT
: clusterID{ clusterID_ }, clusterFlags{ clusterFlags_ }, triangleCount{ triangleCount_ }, vertexCount{ vertexCount_ }, positionTruncateBitCount{ positionTruncateBitCount_ }, indexType{ indexType_ }, opacityMicromapIndexType{ opacityMicromapIndexType_ }, baseGeometryIndexAndGeometryFlags{ baseGeometryIndexAndGeometryFlags_ }, indexBufferStride{ indexBufferStride_ }, vertexBufferStride{ vertexBufferStride_ }, geometryIndexAndFlagsBufferStride{ geometryIndexAndFlagsBufferStride_ }, opacityMicromapIndexBufferStride{ opacityMicromapIndexBufferStride_ }, indexBuffer{ indexBuffer_ }, vertexBuffer{ vertexBuffer_ }, geometryIndexAndFlagsBuffer{ geometryIndexAndFlagsBuffer_ }, opacityMicromapArray{ opacityMicromapArray_ }, opacityMicromapIndexBuffer{ opacityMicromapIndexBuffer_ }, instantiationBoundingBoxLimit{ instantiationBoundingBoxLimit_ }
{}
VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV( ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV( VkClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV( *reinterpret_cast<ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV const *>( &rhs ) )
{}
explicit ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV( ClusterAccelerationStructureBuildTriangleClusterInfoNV const & clusterAccelerationStructureBuildTriangleClusterInfoNV, VULKAN_HPP_NAMESPACE::DeviceAddress instantiationBoundingBoxLimit_ = {} )
: clusterID( clusterAccelerationStructureBuildTriangleClusterInfoNV.clusterID )
, clusterFlags( clusterAccelerationStructureBuildTriangleClusterInfoNV.clusterFlags )
, triangleCount( clusterAccelerationStructureBuildTriangleClusterInfoNV.triangleCount )
, vertexCount( clusterAccelerationStructureBuildTriangleClusterInfoNV.vertexCount )
, positionTruncateBitCount( clusterAccelerationStructureBuildTriangleClusterInfoNV.positionTruncateBitCount )
, indexType( clusterAccelerationStructureBuildTriangleClusterInfoNV.indexType )
, opacityMicromapIndexType( clusterAccelerationStructureBuildTriangleClusterInfoNV.opacityMicromapIndexType )
, baseGeometryIndexAndGeometryFlags( clusterAccelerationStructureBuildTriangleClusterInfoNV.baseGeometryIndexAndGeometryFlags )
, indexBufferStride( clusterAccelerationStructureBuildTriangleClusterInfoNV.indexBufferStride )
, vertexBufferStride( clusterAccelerationStructureBuildTriangleClusterInfoNV.vertexBufferStride )
, geometryIndexAndFlagsBufferStride( clusterAccelerationStructureBuildTriangleClusterInfoNV.geometryIndexAndFlagsBufferStride )
, opacityMicromapIndexBufferStride( clusterAccelerationStructureBuildTriangleClusterInfoNV.opacityMicromapIndexBufferStride )
, indexBuffer( clusterAccelerationStructureBuildTriangleClusterInfoNV.indexBuffer )
, vertexBuffer( clusterAccelerationStructureBuildTriangleClusterInfoNV.vertexBuffer )
, geometryIndexAndFlagsBuffer( clusterAccelerationStructureBuildTriangleClusterInfoNV.geometryIndexAndFlagsBuffer )
, opacityMicromapArray( clusterAccelerationStructureBuildTriangleClusterInfoNV.opacityMicromapArray )
, opacityMicromapIndexBuffer( clusterAccelerationStructureBuildTriangleClusterInfoNV.opacityMicromapIndexBuffer )
, instantiationBoundingBoxLimit( instantiationBoundingBoxLimit_ )
{}
ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & operator=( ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & operator=( VkClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & setClusterID( uint32_t clusterID_ ) VULKAN_HPP_NOEXCEPT
{
clusterID = clusterID_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & setClusterFlags( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureClusterFlagsNV clusterFlags_ ) VULKAN_HPP_NOEXCEPT
{
clusterFlags = clusterFlags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & setTriangleCount( uint32_t triangleCount_ ) VULKAN_HPP_NOEXCEPT
{
triangleCount = triangleCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & setVertexCount( uint32_t vertexCount_ ) VULKAN_HPP_NOEXCEPT
{
vertexCount = vertexCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & setPositionTruncateBitCount( uint32_t positionTruncateBitCount_ ) VULKAN_HPP_NOEXCEPT
{
positionTruncateBitCount = positionTruncateBitCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & setIndexType( uint32_t indexType_ ) VULKAN_HPP_NOEXCEPT
{
indexType = indexType_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & setOpacityMicromapIndexType( uint32_t opacityMicromapIndexType_ ) VULKAN_HPP_NOEXCEPT
{
opacityMicromapIndexType = opacityMicromapIndexType_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & setBaseGeometryIndexAndGeometryFlags( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const & baseGeometryIndexAndGeometryFlags_ ) VULKAN_HPP_NOEXCEPT
{
baseGeometryIndexAndGeometryFlags = baseGeometryIndexAndGeometryFlags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & setIndexBufferStride( uint16_t indexBufferStride_ ) VULKAN_HPP_NOEXCEPT
{
indexBufferStride = indexBufferStride_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & setVertexBufferStride( uint16_t vertexBufferStride_ ) VULKAN_HPP_NOEXCEPT
{
vertexBufferStride = vertexBufferStride_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & setGeometryIndexAndFlagsBufferStride( uint16_t geometryIndexAndFlagsBufferStride_ ) VULKAN_HPP_NOEXCEPT
{
geometryIndexAndFlagsBufferStride = geometryIndexAndFlagsBufferStride_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & setOpacityMicromapIndexBufferStride( uint16_t opacityMicromapIndexBufferStride_ ) VULKAN_HPP_NOEXCEPT
{
opacityMicromapIndexBufferStride = opacityMicromapIndexBufferStride_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & setIndexBuffer( VULKAN_HPP_NAMESPACE::DeviceAddress indexBuffer_ ) VULKAN_HPP_NOEXCEPT
{
indexBuffer = indexBuffer_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & setVertexBuffer( VULKAN_HPP_NAMESPACE::DeviceAddress vertexBuffer_ ) VULKAN_HPP_NOEXCEPT
{
vertexBuffer = vertexBuffer_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & setGeometryIndexAndFlagsBuffer( VULKAN_HPP_NAMESPACE::DeviceAddress geometryIndexAndFlagsBuffer_ ) VULKAN_HPP_NOEXCEPT
{
geometryIndexAndFlagsBuffer = geometryIndexAndFlagsBuffer_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & setOpacityMicromapArray( VULKAN_HPP_NAMESPACE::DeviceAddress opacityMicromapArray_ ) VULKAN_HPP_NOEXCEPT
{
opacityMicromapArray = opacityMicromapArray_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & setOpacityMicromapIndexBuffer( VULKAN_HPP_NAMESPACE::DeviceAddress opacityMicromapIndexBuffer_ ) VULKAN_HPP_NOEXCEPT
{
opacityMicromapIndexBuffer = opacityMicromapIndexBuffer_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & setInstantiationBoundingBoxLimit( VULKAN_HPP_NAMESPACE::DeviceAddress instantiationBoundingBoxLimit_ ) VULKAN_HPP_NOEXCEPT
{
instantiationBoundingBoxLimit = instantiationBoundingBoxLimit_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV*>( this );
}
operator VkClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV*>( this );
}
operator VkClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV*>( this );
}
operator VkClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<uint32_t const &, VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureClusterFlagsNV const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const &, uint16_t const &, uint16_t const &, uint16_t const &, uint16_t const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( clusterID, clusterFlags, triangleCount, vertexCount, positionTruncateBitCount, indexType, opacityMicromapIndexType, baseGeometryIndexAndGeometryFlags, indexBufferStride, vertexBufferStride, geometryIndexAndFlagsBufferStride, opacityMicromapIndexBufferStride, indexBuffer, vertexBuffer, geometryIndexAndFlagsBuffer, opacityMicromapArray, opacityMicromapIndexBuffer, instantiationBoundingBoxLimit );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV const & ) const = default;
#else
bool operator==( ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( clusterID == rhs.clusterID )
&& ( clusterFlags == rhs.clusterFlags )
&& ( triangleCount == rhs.triangleCount )
&& ( vertexCount == rhs.vertexCount )
&& ( positionTruncateBitCount == rhs.positionTruncateBitCount )
&& ( indexType == rhs.indexType )
&& ( opacityMicromapIndexType == rhs.opacityMicromapIndexType )
&& ( baseGeometryIndexAndGeometryFlags == rhs.baseGeometryIndexAndGeometryFlags )
&& ( indexBufferStride == rhs.indexBufferStride )
&& ( vertexBufferStride == rhs.vertexBufferStride )
&& ( geometryIndexAndFlagsBufferStride == rhs.geometryIndexAndFlagsBufferStride )
&& ( opacityMicromapIndexBufferStride == rhs.opacityMicromapIndexBufferStride )
&& ( indexBuffer == rhs.indexBuffer )
&& ( vertexBuffer == rhs.vertexBuffer )
&& ( geometryIndexAndFlagsBuffer == rhs.geometryIndexAndFlagsBuffer )
&& ( opacityMicromapArray == rhs.opacityMicromapArray )
&& ( opacityMicromapIndexBuffer == rhs.opacityMicromapIndexBuffer )
&& ( instantiationBoundingBoxLimit == rhs.instantiationBoundingBoxLimit );
#endif
}
bool operator!=( ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
uint32_t clusterID = {};
VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureClusterFlagsNV clusterFlags = {};
uint32_t triangleCount : 9;
uint32_t vertexCount : 9;
uint32_t positionTruncateBitCount : 6;
uint32_t indexType : 4;
uint32_t opacityMicromapIndexType : 4;
VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV baseGeometryIndexAndGeometryFlags = {};
uint16_t indexBufferStride = {};
uint16_t vertexBufferStride = {};
uint16_t geometryIndexAndFlagsBufferStride = {};
uint16_t opacityMicromapIndexBufferStride = {};
VULKAN_HPP_NAMESPACE::DeviceAddress indexBuffer = {};
VULKAN_HPP_NAMESPACE::DeviceAddress vertexBuffer = {};
VULKAN_HPP_NAMESPACE::DeviceAddress geometryIndexAndFlagsBuffer = {};
VULKAN_HPP_NAMESPACE::DeviceAddress opacityMicromapArray = {};
VULKAN_HPP_NAMESPACE::DeviceAddress opacityMicromapIndexBuffer = {};
VULKAN_HPP_NAMESPACE::DeviceAddress instantiationBoundingBoxLimit = {};
};
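// Usage sketch (illustrative comment, not part of the generated API; `boundsLimitAddress` is
// hypothetical): the explicit constructor above copies every field from a
// ClusterAccelerationStructureBuildTriangleClusterInfoNV and adds the template-only
// instantiationBoundingBoxLimit address, e.g. reusing `cluster` from the sketch above:
//   VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV tmpl( cluster, boundsLimitAddress );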
// wrapper struct for struct VkClusterAccelerationStructureClustersBottomLevelInputNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkClusterAccelerationStructureClustersBottomLevelInputNV.html
struct ClusterAccelerationStructureClustersBottomLevelInputNV
{
using NativeType = VkClusterAccelerationStructureClustersBottomLevelInputNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eClusterAccelerationStructureClustersBottomLevelInputNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureClustersBottomLevelInputNV(uint32_t maxTotalClusterCount_ = {}, uint32_t maxClusterCountPerAccelerationStructure_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, maxTotalClusterCount{ maxTotalClusterCount_ }, maxClusterCountPerAccelerationStructure{ maxClusterCountPerAccelerationStructure_ }
{}
VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureClustersBottomLevelInputNV( ClusterAccelerationStructureClustersBottomLevelInputNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ClusterAccelerationStructureClustersBottomLevelInputNV( VkClusterAccelerationStructureClustersBottomLevelInputNV const & rhs ) VULKAN_HPP_NOEXCEPT
: ClusterAccelerationStructureClustersBottomLevelInputNV( *reinterpret_cast<ClusterAccelerationStructureClustersBottomLevelInputNV const *>( &rhs ) )
{}
ClusterAccelerationStructureClustersBottomLevelInputNV & operator=( ClusterAccelerationStructureClustersBottomLevelInputNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ClusterAccelerationStructureClustersBottomLevelInputNV & operator=( VkClusterAccelerationStructureClustersBottomLevelInputNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureClustersBottomLevelInputNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureClustersBottomLevelInputNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureClustersBottomLevelInputNV & setMaxTotalClusterCount( uint32_t maxTotalClusterCount_ ) VULKAN_HPP_NOEXCEPT
{
maxTotalClusterCount = maxTotalClusterCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureClustersBottomLevelInputNV & setMaxClusterCountPerAccelerationStructure( uint32_t maxClusterCountPerAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT
{
maxClusterCountPerAccelerationStructure = maxClusterCountPerAccelerationStructure_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkClusterAccelerationStructureClustersBottomLevelInputNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkClusterAccelerationStructureClustersBottomLevelInputNV*>( this );
}
operator VkClusterAccelerationStructureClustersBottomLevelInputNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkClusterAccelerationStructureClustersBottomLevelInputNV*>( this );
}
operator VkClusterAccelerationStructureClustersBottomLevelInputNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkClusterAccelerationStructureClustersBottomLevelInputNV*>( this );
}
operator VkClusterAccelerationStructureClustersBottomLevelInputNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkClusterAccelerationStructureClustersBottomLevelInputNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, maxTotalClusterCount, maxClusterCountPerAccelerationStructure );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ClusterAccelerationStructureClustersBottomLevelInputNV const & ) const = default;
#else
bool operator==( ClusterAccelerationStructureClustersBottomLevelInputNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( maxTotalClusterCount == rhs.maxTotalClusterCount )
&& ( maxClusterCountPerAccelerationStructure == rhs.maxClusterCountPerAccelerationStructure );
#endif
}
bool operator!=( ClusterAccelerationStructureClustersBottomLevelInputNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eClusterAccelerationStructureClustersBottomLevelInputNV;
void * pNext = {};
uint32_t maxTotalClusterCount = {};
uint32_t maxClusterCountPerAccelerationStructure = {};
};
template <>
struct CppType<StructureType, StructureType::eClusterAccelerationStructureClustersBottomLevelInputNV>
{
using Type = ClusterAccelerationStructureClustersBottomLevelInputNV;
};
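// Usage sketch (illustrative comment, not part of the generated API; the limits shown are
// hypothetical): sType is prefilled from structureType, so only the capacity limits need setting.
//   auto blasInput = VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureClustersBottomLevelInputNV{}
//                      .setMaxTotalClusterCount( 4096 )
//                      .setMaxClusterCountPerAccelerationStructure( 256 );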
// wrapper struct for struct VkClusterAccelerationStructureTriangleClusterInputNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkClusterAccelerationStructureTriangleClusterInputNV.html
struct ClusterAccelerationStructureTriangleClusterInputNV
{
using NativeType = VkClusterAccelerationStructureTriangleClusterInputNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eClusterAccelerationStructureTriangleClusterInputNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureTriangleClusterInputNV(VULKAN_HPP_NAMESPACE::Format vertexFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, uint32_t maxGeometryIndexValue_ = {}, uint32_t maxClusterUniqueGeometryCount_ = {}, uint32_t maxClusterTriangleCount_ = {}, uint32_t maxClusterVertexCount_ = {}, uint32_t maxTotalTriangleCount_ = {}, uint32_t maxTotalVertexCount_ = {}, uint32_t minPositionTruncateBitCount_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, vertexFormat{ vertexFormat_ }, maxGeometryIndexValue{ maxGeometryIndexValue_ }, maxClusterUniqueGeometryCount{ maxClusterUniqueGeometryCount_ }, maxClusterTriangleCount{ maxClusterTriangleCount_ }, maxClusterVertexCount{ maxClusterVertexCount_ }, maxTotalTriangleCount{ maxTotalTriangleCount_ }, maxTotalVertexCount{ maxTotalVertexCount_ }, minPositionTruncateBitCount{ minPositionTruncateBitCount_ }
{}
VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureTriangleClusterInputNV( ClusterAccelerationStructureTriangleClusterInputNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ClusterAccelerationStructureTriangleClusterInputNV( VkClusterAccelerationStructureTriangleClusterInputNV const & rhs ) VULKAN_HPP_NOEXCEPT
: ClusterAccelerationStructureTriangleClusterInputNV( *reinterpret_cast<ClusterAccelerationStructureTriangleClusterInputNV const *>( &rhs ) )
{}
ClusterAccelerationStructureTriangleClusterInputNV & operator=( ClusterAccelerationStructureTriangleClusterInputNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ClusterAccelerationStructureTriangleClusterInputNV & operator=( VkClusterAccelerationStructureTriangleClusterInputNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureTriangleClusterInputNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureTriangleClusterInputNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureTriangleClusterInputNV & setVertexFormat( VULKAN_HPP_NAMESPACE::Format vertexFormat_ ) VULKAN_HPP_NOEXCEPT
{
vertexFormat = vertexFormat_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureTriangleClusterInputNV & setMaxGeometryIndexValue( uint32_t maxGeometryIndexValue_ ) VULKAN_HPP_NOEXCEPT
{
maxGeometryIndexValue = maxGeometryIndexValue_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureTriangleClusterInputNV & setMaxClusterUniqueGeometryCount( uint32_t maxClusterUniqueGeometryCount_ ) VULKAN_HPP_NOEXCEPT
{
maxClusterUniqueGeometryCount = maxClusterUniqueGeometryCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureTriangleClusterInputNV & setMaxClusterTriangleCount( uint32_t maxClusterTriangleCount_ ) VULKAN_HPP_NOEXCEPT
{
maxClusterTriangleCount = maxClusterTriangleCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureTriangleClusterInputNV & setMaxClusterVertexCount( uint32_t maxClusterVertexCount_ ) VULKAN_HPP_NOEXCEPT
{
maxClusterVertexCount = maxClusterVertexCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureTriangleClusterInputNV & setMaxTotalTriangleCount( uint32_t maxTotalTriangleCount_ ) VULKAN_HPP_NOEXCEPT
{
maxTotalTriangleCount = maxTotalTriangleCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureTriangleClusterInputNV & setMaxTotalVertexCount( uint32_t maxTotalVertexCount_ ) VULKAN_HPP_NOEXCEPT
{
maxTotalVertexCount = maxTotalVertexCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureTriangleClusterInputNV & setMinPositionTruncateBitCount( uint32_t minPositionTruncateBitCount_ ) VULKAN_HPP_NOEXCEPT
{
minPositionTruncateBitCount = minPositionTruncateBitCount_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkClusterAccelerationStructureTriangleClusterInputNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkClusterAccelerationStructureTriangleClusterInputNV*>( this );
}
operator VkClusterAccelerationStructureTriangleClusterInputNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkClusterAccelerationStructureTriangleClusterInputNV*>( this );
}
operator VkClusterAccelerationStructureTriangleClusterInputNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkClusterAccelerationStructureTriangleClusterInputNV*>( this );
}
operator VkClusterAccelerationStructureTriangleClusterInputNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkClusterAccelerationStructureTriangleClusterInputNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Format const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, vertexFormat, maxGeometryIndexValue, maxClusterUniqueGeometryCount, maxClusterTriangleCount, maxClusterVertexCount, maxTotalTriangleCount, maxTotalVertexCount, minPositionTruncateBitCount );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ClusterAccelerationStructureTriangleClusterInputNV const & ) const = default;
#else
bool operator==( ClusterAccelerationStructureTriangleClusterInputNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( vertexFormat == rhs.vertexFormat )
&& ( maxGeometryIndexValue == rhs.maxGeometryIndexValue )
&& ( maxClusterUniqueGeometryCount == rhs.maxClusterUniqueGeometryCount )
&& ( maxClusterTriangleCount == rhs.maxClusterTriangleCount )
&& ( maxClusterVertexCount == rhs.maxClusterVertexCount )
&& ( maxTotalTriangleCount == rhs.maxTotalTriangleCount )
&& ( maxTotalVertexCount == rhs.maxTotalVertexCount )
&& ( minPositionTruncateBitCount == rhs.minPositionTruncateBitCount );
#endif
}
bool operator!=( ClusterAccelerationStructureTriangleClusterInputNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eClusterAccelerationStructureTriangleClusterInputNV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Format vertexFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
uint32_t maxGeometryIndexValue = {};
uint32_t maxClusterUniqueGeometryCount = {};
uint32_t maxClusterTriangleCount = {};
uint32_t maxClusterVertexCount = {};
uint32_t maxTotalTriangleCount = {};
uint32_t maxTotalVertexCount = {};
uint32_t minPositionTruncateBitCount = {};
};
template <>
struct CppType<StructureType, StructureType::eClusterAccelerationStructureTriangleClusterInputNV>
{
using Type = ClusterAccelerationStructureTriangleClusterInputNV;
};
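// Usage sketch (illustrative comment, not part of the generated API; the limits shown are
// hypothetical): these worst-case limits let the implementation size scratch and result memory
// for triangle-cluster builds conservatively.
//   auto clusterInput = VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureTriangleClusterInputNV{}
//                         .setVertexFormat( VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sfloat )
//                         .setMaxClusterTriangleCount( 128 )
//                         .setMaxClusterVertexCount( 128 )
//                         .setMaxTotalTriangleCount( 1u << 20 )
//                         .setMaxTotalVertexCount( 1u << 20 );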
// wrapper struct for struct VkClusterAccelerationStructureMoveObjectsInputNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkClusterAccelerationStructureMoveObjectsInputNV.html
struct ClusterAccelerationStructureMoveObjectsInputNV
{
using NativeType = VkClusterAccelerationStructureMoveObjectsInputNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eClusterAccelerationStructureMoveObjectsInputNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureMoveObjectsInputNV(VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureTypeNV type_ = VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureTypeNV::eClustersBottomLevel, VULKAN_HPP_NAMESPACE::Bool32 noMoveOverlap_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize maxMovedBytes_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, type{ type_ }, noMoveOverlap{ noMoveOverlap_ }, maxMovedBytes{ maxMovedBytes_ }
{}
VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureMoveObjectsInputNV( ClusterAccelerationStructureMoveObjectsInputNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ClusterAccelerationStructureMoveObjectsInputNV( VkClusterAccelerationStructureMoveObjectsInputNV const & rhs ) VULKAN_HPP_NOEXCEPT
: ClusterAccelerationStructureMoveObjectsInputNV( *reinterpret_cast<ClusterAccelerationStructureMoveObjectsInputNV const *>( &rhs ) )
{}
ClusterAccelerationStructureMoveObjectsInputNV & operator=( ClusterAccelerationStructureMoveObjectsInputNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ClusterAccelerationStructureMoveObjectsInputNV & operator=( VkClusterAccelerationStructureMoveObjectsInputNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureMoveObjectsInputNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureMoveObjectsInputNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureMoveObjectsInputNV & setType( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureTypeNV type_ ) VULKAN_HPP_NOEXCEPT
{
type = type_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureMoveObjectsInputNV & setNoMoveOverlap( VULKAN_HPP_NAMESPACE::Bool32 noMoveOverlap_ ) VULKAN_HPP_NOEXCEPT
{
noMoveOverlap = noMoveOverlap_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureMoveObjectsInputNV & setMaxMovedBytes( VULKAN_HPP_NAMESPACE::DeviceSize maxMovedBytes_ ) VULKAN_HPP_NOEXCEPT
{
maxMovedBytes = maxMovedBytes_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkClusterAccelerationStructureMoveObjectsInputNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkClusterAccelerationStructureMoveObjectsInputNV*>( this );
}
operator VkClusterAccelerationStructureMoveObjectsInputNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkClusterAccelerationStructureMoveObjectsInputNV*>( this );
}
operator VkClusterAccelerationStructureMoveObjectsInputNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkClusterAccelerationStructureMoveObjectsInputNV*>( this );
}
operator VkClusterAccelerationStructureMoveObjectsInputNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkClusterAccelerationStructureMoveObjectsInputNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureTypeNV const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, type, noMoveOverlap, maxMovedBytes );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ClusterAccelerationStructureMoveObjectsInputNV const & ) const = default;
#else
bool operator==( ClusterAccelerationStructureMoveObjectsInputNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( type == rhs.type )
&& ( noMoveOverlap == rhs.noMoveOverlap )
&& ( maxMovedBytes == rhs.maxMovedBytes );
#endif
}
bool operator!=( ClusterAccelerationStructureMoveObjectsInputNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eClusterAccelerationStructureMoveObjectsInputNV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureTypeNV type = VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureTypeNV::eClustersBottomLevel;
VULKAN_HPP_NAMESPACE::Bool32 noMoveOverlap = {};
VULKAN_HPP_NAMESPACE::DeviceSize maxMovedBytes = {};
};
template <>
struct CppType<StructureType, StructureType::eClusterAccelerationStructureMoveObjectsInputNV>
{
using Type = ClusterAccelerationStructureMoveObjectsInputNV;
};
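// Example usage (illustrative; assumes the default `vk` namespace alias):
//
//   vk::ClusterAccelerationStructureMoveObjectsInputNV moveInput =
//       vk::ClusterAccelerationStructureMoveObjectsInputNV{}
//           .setType( vk::ClusterAccelerationStructureTypeNV::eClustersBottomLevel )
//           .setNoMoveOverlap( VK_FALSE )
//           .setMaxMovedBytes( 64ull * 1024 * 1024 );
// wrapper union for union VkClusterAccelerationStructureOpInputNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkClusterAccelerationStructureOpInputNV.html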
union ClusterAccelerationStructureOpInputNV
{
using NativeType = VkClusterAccelerationStructureOpInputNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureOpInputNV( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureClustersBottomLevelInputNV * pClustersBottomLevel_ = {} )
: pClustersBottomLevel( pClustersBottomLevel_ )
{}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureOpInputNV( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureTriangleClusterInputNV * pTriangleClusters_ )
: pTriangleClusters( pTriangleClusters_ )
{}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureOpInputNV( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureMoveObjectsInputNV * pMoveObjects_ )
: pMoveObjects( pMoveObjects_ )
{}
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureOpInputNV & setPClustersBottomLevel( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureClustersBottomLevelInputNV * pClustersBottomLevel_ ) VULKAN_HPP_NOEXCEPT
{
pClustersBottomLevel = pClustersBottomLevel_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureOpInputNV & setPTriangleClusters( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureTriangleClusterInputNV * pTriangleClusters_ ) VULKAN_HPP_NOEXCEPT
{
pTriangleClusters = pTriangleClusters_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureOpInputNV & setPMoveObjects( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureMoveObjectsInputNV * pMoveObjects_ ) VULKAN_HPP_NOEXCEPT
{
pMoveObjects = pMoveObjects_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkClusterAccelerationStructureOpInputNV const &() const
{
return *reinterpret_cast<const VkClusterAccelerationStructureOpInputNV*>( this );
}
operator VkClusterAccelerationStructureOpInputNV &()
{
return *reinterpret_cast<VkClusterAccelerationStructureOpInputNV*>( this );
}
#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureClustersBottomLevelInputNV * pClustersBottomLevel;
VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureTriangleClusterInputNV * pTriangleClusters;
VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureMoveObjectsInputNV * pMoveObjects;
#else
VkClusterAccelerationStructureClustersBottomLevelInputNV * pClustersBottomLevel;
VkClusterAccelerationStructureTriangleClusterInputNV * pTriangleClusters;
VkClusterAccelerationStructureMoveObjectsInputNV * pMoveObjects;
#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
};
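// Example usage (illustrative): the union carries a pointer to exactly one of the three
// per-operation input structs, and the active member must correspond to the opType set in
// ClusterAccelerationStructureInputInfoNV (here, eMoveObjects with `moveInput` from above):
//
//   vk::ClusterAccelerationStructureOpInputNV opInput{};
//   opInput.setPMoveObjects( &moveInput );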
// wrapper struct for struct VkClusterAccelerationStructureInputInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkClusterAccelerationStructureInputInfoNV.html
struct ClusterAccelerationStructureInputInfoNV
{
using NativeType = VkClusterAccelerationStructureInputInfoNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eClusterAccelerationStructureInputInfoNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInputInfoNV(uint32_t maxAccelerationStructureCount_ = {}, VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ = {}, VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureOpTypeNV opType_ = VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureOpTypeNV::eMoveObjects, VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureOpModeNV opMode_ = VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureOpModeNV::eImplicitDestinations, VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureOpInputNV opInput_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, maxAccelerationStructureCount{ maxAccelerationStructureCount_ }, flags{ flags_ }, opType{ opType_ }, opMode{ opMode_ }, opInput{ opInput_ }
{}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInputInfoNV( ClusterAccelerationStructureInputInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ClusterAccelerationStructureInputInfoNV( VkClusterAccelerationStructureInputInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: ClusterAccelerationStructureInputInfoNV( *reinterpret_cast<ClusterAccelerationStructureInputInfoNV const *>( &rhs ) )
{}
ClusterAccelerationStructureInputInfoNV & operator=( ClusterAccelerationStructureInputInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ClusterAccelerationStructureInputInfoNV & operator=( VkClusterAccelerationStructureInputInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureInputInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInputInfoNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInputInfoNV & setMaxAccelerationStructureCount( uint32_t maxAccelerationStructureCount_ ) VULKAN_HPP_NOEXCEPT
{
maxAccelerationStructureCount = maxAccelerationStructureCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInputInfoNV & setFlags( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInputInfoNV & setOpType( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureOpTypeNV opType_ ) VULKAN_HPP_NOEXCEPT
{
opType = opType_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInputInfoNV & setOpMode( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureOpModeNV opMode_ ) VULKAN_HPP_NOEXCEPT
{
opMode = opMode_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInputInfoNV & setOpInput( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureOpInputNV const & opInput_ ) VULKAN_HPP_NOEXCEPT
{
opInput = opInput_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkClusterAccelerationStructureInputInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkClusterAccelerationStructureInputInfoNV*>( this );
}
operator VkClusterAccelerationStructureInputInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkClusterAccelerationStructureInputInfoNV*>( this );
}
operator VkClusterAccelerationStructureInputInfoNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkClusterAccelerationStructureInputInfoNV*>( this );
}
operator VkClusterAccelerationStructureInputInfoNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkClusterAccelerationStructureInputInfoNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR const &, VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureOpTypeNV const &, VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureOpModeNV const &, VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureOpInputNV const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, maxAccelerationStructureCount, flags, opType, opMode, opInput );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eClusterAccelerationStructureInputInfoNV;
void * pNext = {};
uint32_t maxAccelerationStructureCount = {};
VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags = {};
VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureOpTypeNV opType = VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureOpTypeNV::eMoveObjects;
VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureOpModeNV opMode = VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureOpModeNV::eImplicitDestinations;
VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureOpInputNV opInput = {};
};
template <>
struct CppType<StructureType, StructureType::eClusterAccelerationStructureInputInfoNV>
{
using Type = ClusterAccelerationStructureInputInfoNV;
};
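// Example usage (illustrative; `opInput` as sketched above):
//
//   vk::ClusterAccelerationStructureInputInfoNV inputInfo =
//       vk::ClusterAccelerationStructureInputInfoNV{}
//           .setMaxAccelerationStructureCount( 1024 )
//           .setOpType( vk::ClusterAccelerationStructureOpTypeNV::eMoveObjects )
//           .setOpMode( vk::ClusterAccelerationStructureOpModeNV::eImplicitDestinations )
//           .setOpInput( opInput );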
// wrapper struct for struct VkStridedDeviceAddressRegionKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkStridedDeviceAddressRegionKHR.html
struct StridedDeviceAddressRegionKHR
{
using NativeType = VkStridedDeviceAddressRegionKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR StridedDeviceAddressRegionKHR(VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize stride_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}) VULKAN_HPP_NOEXCEPT
: deviceAddress{ deviceAddress_ }, stride{ stride_ }, size{ size_ }
{}
VULKAN_HPP_CONSTEXPR StridedDeviceAddressRegionKHR( StridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
StridedDeviceAddressRegionKHR( VkStridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: StridedDeviceAddressRegionKHR( *reinterpret_cast<StridedDeviceAddressRegionKHR const *>( &rhs ) )
{}
StridedDeviceAddressRegionKHR & operator=( StridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
StridedDeviceAddressRegionKHR & operator=( VkStridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressRegionKHR & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
{
deviceAddress = deviceAddress_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressRegionKHR & setStride( VULKAN_HPP_NAMESPACE::DeviceSize stride_ ) VULKAN_HPP_NOEXCEPT
{
stride = stride_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressRegionKHR & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
{
size = size_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkStridedDeviceAddressRegionKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkStridedDeviceAddressRegionKHR*>( this );
}
operator VkStridedDeviceAddressRegionKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkStridedDeviceAddressRegionKHR*>( this );
}
operator VkStridedDeviceAddressRegionKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkStridedDeviceAddressRegionKHR*>( this );
}
operator VkStridedDeviceAddressRegionKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkStridedDeviceAddressRegionKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( deviceAddress, stride, size );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( StridedDeviceAddressRegionKHR const & ) const = default;
#else
bool operator==( StridedDeviceAddressRegionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( deviceAddress == rhs.deviceAddress )
&& ( stride == rhs.stride )
&& ( size == rhs.size );
#endif
}
bool operator!=( StridedDeviceAddressRegionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {};
VULKAN_HPP_NAMESPACE::DeviceSize stride = {};
VULKAN_HPP_NAMESPACE::DeviceSize size = {};
};
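// Example usage (illustrative; `sbtAddress`, `handleSizeAligned`, and `missGroupCount` are
// assumed values, e.g. derived from ray tracing pipeline properties when laying out a
// shader binding table):
//
//   vk::StridedDeviceAddressRegionKHR raygenRegion{ sbtAddress, handleSizeAligned, handleSizeAligned };
//   vk::StridedDeviceAddressRegionKHR missRegion{ sbtAddress + handleSizeAligned, handleSizeAligned, handleSizeAligned * missGroupCount };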
// wrapper struct for struct VkClusterAccelerationStructureCommandsInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkClusterAccelerationStructureCommandsInfoNV.html
struct ClusterAccelerationStructureCommandsInfoNV
{
using NativeType = VkClusterAccelerationStructureCommandsInfoNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eClusterAccelerationStructureCommandsInfoNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureCommandsInfoNV(VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureInputInfoNV input_ = {}, VULKAN_HPP_NAMESPACE::DeviceAddress dstImplicitData_ = {}, VULKAN_HPP_NAMESPACE::DeviceAddress scratchData_ = {}, VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR dstAddressesArray_ = {}, VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR dstSizesArray_ = {}, VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR srcInfosArray_ = {}, VULKAN_HPP_NAMESPACE::DeviceAddress srcInfosCount_ = {}, VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureAddressResolutionFlagsNV addressResolutionFlags_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, input{ input_ }, dstImplicitData{ dstImplicitData_ }, scratchData{ scratchData_ }, dstAddressesArray{ dstAddressesArray_ }, dstSizesArray{ dstSizesArray_ }, srcInfosArray{ srcInfosArray_ }, srcInfosCount{ srcInfosCount_ }, addressResolutionFlags{ addressResolutionFlags_ }
{}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureCommandsInfoNV( ClusterAccelerationStructureCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ClusterAccelerationStructureCommandsInfoNV( VkClusterAccelerationStructureCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: ClusterAccelerationStructureCommandsInfoNV( *reinterpret_cast<ClusterAccelerationStructureCommandsInfoNV const *>( &rhs ) )
{}
ClusterAccelerationStructureCommandsInfoNV & operator=( ClusterAccelerationStructureCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ClusterAccelerationStructureCommandsInfoNV & operator=( VkClusterAccelerationStructureCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureCommandsInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureCommandsInfoNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureCommandsInfoNV & setInput( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureInputInfoNV const & input_ ) VULKAN_HPP_NOEXCEPT
{
input = input_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureCommandsInfoNV & setDstImplicitData( VULKAN_HPP_NAMESPACE::DeviceAddress dstImplicitData_ ) VULKAN_HPP_NOEXCEPT
{
dstImplicitData = dstImplicitData_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureCommandsInfoNV & setScratchData( VULKAN_HPP_NAMESPACE::DeviceAddress scratchData_ ) VULKAN_HPP_NOEXCEPT
{
scratchData = scratchData_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureCommandsInfoNV & setDstAddressesArray( VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR const & dstAddressesArray_ ) VULKAN_HPP_NOEXCEPT
{
dstAddressesArray = dstAddressesArray_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureCommandsInfoNV & setDstSizesArray( VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR const & dstSizesArray_ ) VULKAN_HPP_NOEXCEPT
{
dstSizesArray = dstSizesArray_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureCommandsInfoNV & setSrcInfosArray( VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR const & srcInfosArray_ ) VULKAN_HPP_NOEXCEPT
{
srcInfosArray = srcInfosArray_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureCommandsInfoNV & setSrcInfosCount( VULKAN_HPP_NAMESPACE::DeviceAddress srcInfosCount_ ) VULKAN_HPP_NOEXCEPT
{
srcInfosCount = srcInfosCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureCommandsInfoNV & setAddressResolutionFlags( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureAddressResolutionFlagsNV addressResolutionFlags_ ) VULKAN_HPP_NOEXCEPT
{
addressResolutionFlags = addressResolutionFlags_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkClusterAccelerationStructureCommandsInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkClusterAccelerationStructureCommandsInfoNV*>( this );
}
operator VkClusterAccelerationStructureCommandsInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkClusterAccelerationStructureCommandsInfoNV*>( this );
}
operator VkClusterAccelerationStructureCommandsInfoNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkClusterAccelerationStructureCommandsInfoNV*>( this );
}
operator VkClusterAccelerationStructureCommandsInfoNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkClusterAccelerationStructureCommandsInfoNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureInputInfoNV const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR const &, VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR const &, VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureAddressResolutionFlagsNV const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, input, dstImplicitData, scratchData, dstAddressesArray, dstSizesArray, srcInfosArray, srcInfosCount, addressResolutionFlags );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eClusterAccelerationStructureCommandsInfoNV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureInputInfoNV input = {};
VULKAN_HPP_NAMESPACE::DeviceAddress dstImplicitData = {};
VULKAN_HPP_NAMESPACE::DeviceAddress scratchData = {};
VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR dstAddressesArray = {};
VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR dstSizesArray = {};
VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR srcInfosArray = {};
VULKAN_HPP_NAMESPACE::DeviceAddress srcInfosCount = {};
VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureAddressResolutionFlagsNV addressResolutionFlags = {};
};
template <>
struct CppType<StructureType, StructureType::eClusterAccelerationStructureCommandsInfoNV>
{
using Type = ClusterAccelerationStructureCommandsInfoNV;
};
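// Example usage (illustrative; `inputInfo` as sketched above, `implicitDataAddress` and
// `scratchAddress` assumed to be device addresses of suitably sized buffers):
//
//   vk::ClusterAccelerationStructureCommandsInfoNV commandsInfo =
//       vk::ClusterAccelerationStructureCommandsInfoNV{}
//           .setInput( inputInfo )
//           .setDstImplicitData( implicitDataAddress )
//           .setScratchData( scratchAddress );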
// wrapper struct for struct VkClusterAccelerationStructureInstantiateClusterInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkClusterAccelerationStructureInstantiateClusterInfoNV.html
struct ClusterAccelerationStructureInstantiateClusterInfoNV
{
using NativeType = VkClusterAccelerationStructureInstantiateClusterInfoNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureInstantiateClusterInfoNV(uint32_t clusterIdOffset_ = {}, uint32_t geometryIndexOffset_ = {}, uint32_t reserved_ = {}, VULKAN_HPP_NAMESPACE::DeviceAddress clusterTemplateAddress_ = {}, VULKAN_HPP_NAMESPACE::StridedDeviceAddressNV vertexBuffer_ = {}) VULKAN_HPP_NOEXCEPT
: clusterIdOffset{ clusterIdOffset_ }, geometryIndexOffset{ geometryIndexOffset_ }, reserved{ reserved_ }, clusterTemplateAddress{ clusterTemplateAddress_ }, vertexBuffer{ vertexBuffer_ }
{}
VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureInstantiateClusterInfoNV( ClusterAccelerationStructureInstantiateClusterInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ClusterAccelerationStructureInstantiateClusterInfoNV( VkClusterAccelerationStructureInstantiateClusterInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: ClusterAccelerationStructureInstantiateClusterInfoNV( *reinterpret_cast<ClusterAccelerationStructureInstantiateClusterInfoNV const *>( &rhs ) )
{}
ClusterAccelerationStructureInstantiateClusterInfoNV & operator=( ClusterAccelerationStructureInstantiateClusterInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ClusterAccelerationStructureInstantiateClusterInfoNV & operator=( VkClusterAccelerationStructureInstantiateClusterInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureInstantiateClusterInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInstantiateClusterInfoNV & setClusterIdOffset( uint32_t clusterIdOffset_ ) VULKAN_HPP_NOEXCEPT
{
clusterIdOffset = clusterIdOffset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInstantiateClusterInfoNV & setGeometryIndexOffset( uint32_t geometryIndexOffset_ ) VULKAN_HPP_NOEXCEPT
{
geometryIndexOffset = geometryIndexOffset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInstantiateClusterInfoNV & setReserved( uint32_t reserved_ ) VULKAN_HPP_NOEXCEPT
{
reserved = reserved_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInstantiateClusterInfoNV & setClusterTemplateAddress( VULKAN_HPP_NAMESPACE::DeviceAddress clusterTemplateAddress_ ) VULKAN_HPP_NOEXCEPT
{
clusterTemplateAddress = clusterTemplateAddress_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInstantiateClusterInfoNV & setVertexBuffer( VULKAN_HPP_NAMESPACE::StridedDeviceAddressNV const & vertexBuffer_ ) VULKAN_HPP_NOEXCEPT
{
vertexBuffer = vertexBuffer_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkClusterAccelerationStructureInstantiateClusterInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkClusterAccelerationStructureInstantiateClusterInfoNV*>( this );
}
operator VkClusterAccelerationStructureInstantiateClusterInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkClusterAccelerationStructureInstantiateClusterInfoNV*>( this );
}
operator VkClusterAccelerationStructureInstantiateClusterInfoNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkClusterAccelerationStructureInstantiateClusterInfoNV*>( this );
}
operator VkClusterAccelerationStructureInstantiateClusterInfoNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkClusterAccelerationStructureInstantiateClusterInfoNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::StridedDeviceAddressNV const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( clusterIdOffset, geometryIndexOffset, reserved, clusterTemplateAddress, vertexBuffer );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ClusterAccelerationStructureInstantiateClusterInfoNV const & ) const = default;
#else
bool operator==( ClusterAccelerationStructureInstantiateClusterInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( clusterIdOffset == rhs.clusterIdOffset )
&& ( geometryIndexOffset == rhs.geometryIndexOffset )
&& ( reserved == rhs.reserved )
&& ( clusterTemplateAddress == rhs.clusterTemplateAddress )
&& ( vertexBuffer == rhs.vertexBuffer );
#endif
}
bool operator!=( ClusterAccelerationStructureInstantiateClusterInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
uint32_t clusterIdOffset = {};
uint32_t geometryIndexOffset : 24;  // low 24 bits of the packed dword; bitfields cannot take a default member initializer before C++20
uint32_t reserved : 8;              // upper 8 bits, reserved for future use
VULKAN_HPP_NAMESPACE::DeviceAddress clusterTemplateAddress = {};
VULKAN_HPP_NAMESPACE::StridedDeviceAddressNV vertexBuffer = {};
};
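// Example usage (illustrative; `templateAddress`, `vertexBufferAddress`, and `vertexStride`
// are assumed values; note that geometryIndexOffset must fit in its 24-bit field):
//
//   vk::ClusterAccelerationStructureInstantiateClusterInfoNV instantiateInfo{};
//   instantiateInfo.setClusterIdOffset( 0 )
//                  .setGeometryIndexOffset( 0 )
//                  .setClusterTemplateAddress( templateAddress )
//                  .setVertexBuffer( vk::StridedDeviceAddressNV{ vertexBufferAddress, vertexStride } );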
// wrapper struct for struct VkClusterAccelerationStructureMoveObjectsInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkClusterAccelerationStructureMoveObjectsInfoNV.html
struct ClusterAccelerationStructureMoveObjectsInfoNV
{
using NativeType = VkClusterAccelerationStructureMoveObjectsInfoNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureMoveObjectsInfoNV(VULKAN_HPP_NAMESPACE::DeviceAddress srcAccelerationStructure_ = {}) VULKAN_HPP_NOEXCEPT
: srcAccelerationStructure{ srcAccelerationStructure_ }
{}
VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureMoveObjectsInfoNV( ClusterAccelerationStructureMoveObjectsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ClusterAccelerationStructureMoveObjectsInfoNV( VkClusterAccelerationStructureMoveObjectsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: ClusterAccelerationStructureMoveObjectsInfoNV( *reinterpret_cast<ClusterAccelerationStructureMoveObjectsInfoNV const *>( &rhs ) )
{}
ClusterAccelerationStructureMoveObjectsInfoNV & operator=( ClusterAccelerationStructureMoveObjectsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ClusterAccelerationStructureMoveObjectsInfoNV & operator=( VkClusterAccelerationStructureMoveObjectsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureMoveObjectsInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureMoveObjectsInfoNV & setSrcAccelerationStructure( VULKAN_HPP_NAMESPACE::DeviceAddress srcAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT
{
srcAccelerationStructure = srcAccelerationStructure_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkClusterAccelerationStructureMoveObjectsInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkClusterAccelerationStructureMoveObjectsInfoNV*>( this );
}
operator VkClusterAccelerationStructureMoveObjectsInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkClusterAccelerationStructureMoveObjectsInfoNV*>( this );
}
operator VkClusterAccelerationStructureMoveObjectsInfoNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkClusterAccelerationStructureMoveObjectsInfoNV*>( this );
}
operator VkClusterAccelerationStructureMoveObjectsInfoNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkClusterAccelerationStructureMoveObjectsInfoNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::DeviceAddress const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( srcAccelerationStructure );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ClusterAccelerationStructureMoveObjectsInfoNV const & ) const = default;
#else
bool operator==( ClusterAccelerationStructureMoveObjectsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( srcAccelerationStructure == rhs.srcAccelerationStructure );
#endif
}
bool operator!=( ClusterAccelerationStructureMoveObjectsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::DeviceAddress srcAccelerationStructure = {};
};
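// Example usage (illustrative): one such entry is consumed per object being moved, with
// `oldClasAddress` an assumed device address of the source acceleration structure:
//
//   vk::ClusterAccelerationStructureMoveObjectsInfoNV moveInfo{ oldClasAddress };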
// wrapper struct for struct VkCoarseSampleLocationNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCoarseSampleLocationNV.html
struct CoarseSampleLocationNV
{
using NativeType = VkCoarseSampleLocationNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR CoarseSampleLocationNV(uint32_t pixelX_ = {}, uint32_t pixelY_ = {}, uint32_t sample_ = {}) VULKAN_HPP_NOEXCEPT
: pixelX{ pixelX_ }, pixelY{ pixelY_ }, sample{ sample_ }
{}
VULKAN_HPP_CONSTEXPR CoarseSampleLocationNV( CoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CoarseSampleLocationNV( VkCoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT
: CoarseSampleLocationNV( *reinterpret_cast<CoarseSampleLocationNV const *>( &rhs ) )
{}
CoarseSampleLocationNV & operator=( CoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
CoarseSampleLocationNV & operator=( VkCoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 CoarseSampleLocationNV & setPixelX( uint32_t pixelX_ ) VULKAN_HPP_NOEXCEPT
{
pixelX = pixelX_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CoarseSampleLocationNV & setPixelY( uint32_t pixelY_ ) VULKAN_HPP_NOEXCEPT
{
pixelY = pixelY_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CoarseSampleLocationNV & setSample( uint32_t sample_ ) VULKAN_HPP_NOEXCEPT
{
sample = sample_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkCoarseSampleLocationNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCoarseSampleLocationNV*>( this );
}
operator VkCoarseSampleLocationNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCoarseSampleLocationNV*>( this );
}
operator VkCoarseSampleLocationNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkCoarseSampleLocationNV*>( this );
}
operator VkCoarseSampleLocationNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkCoarseSampleLocationNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<uint32_t const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( pixelX, pixelY, sample );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( CoarseSampleLocationNV const & ) const = default;
#else
bool operator==( CoarseSampleLocationNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( pixelX == rhs.pixelX )
&& ( pixelY == rhs.pixelY )
&& ( sample == rhs.sample );
#endif
}
bool operator!=( CoarseSampleLocationNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
uint32_t pixelX = {};
uint32_t pixelY = {};
uint32_t sample = {};
};
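// Example usage (illustrative): identifies one covered sample within a coarse fragment,
// addressed by pixel coordinate and per-pixel sample index:
//
//   vk::CoarseSampleLocationNV location{ 0, 0, 0 };  // sample 0 of the fragment's pixel (0,0)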
// wrapper struct for struct VkCoarseSampleOrderCustomNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCoarseSampleOrderCustomNV.html
struct CoarseSampleOrderCustomNV
{
using NativeType = VkCoarseSampleOrderCustomNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR CoarseSampleOrderCustomNV(VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_ = VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV::eNoInvocations, uint32_t sampleCount_ = {}, uint32_t sampleLocationCount_ = {}, const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV * pSampleLocations_ = {}) VULKAN_HPP_NOEXCEPT
: shadingRate{ shadingRate_ }, sampleCount{ sampleCount_ }, sampleLocationCount{ sampleLocationCount_ }, pSampleLocations{ pSampleLocations_ }
{}
VULKAN_HPP_CONSTEXPR CoarseSampleOrderCustomNV( CoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CoarseSampleOrderCustomNV( VkCoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT
: CoarseSampleOrderCustomNV( *reinterpret_cast<CoarseSampleOrderCustomNV const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
CoarseSampleOrderCustomNV( VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_, uint32_t sampleCount_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV> const & sampleLocations_ )
: shadingRate( shadingRate_ ), sampleCount( sampleCount_ ), sampleLocationCount( static_cast<uint32_t>( sampleLocations_.size() ) ), pSampleLocations( sampleLocations_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
CoarseSampleOrderCustomNV & operator=( CoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
CoarseSampleOrderCustomNV & operator=( VkCoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 CoarseSampleOrderCustomNV & setShadingRate( VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_ ) VULKAN_HPP_NOEXCEPT
{
shadingRate = shadingRate_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CoarseSampleOrderCustomNV & setSampleCount( uint32_t sampleCount_ ) VULKAN_HPP_NOEXCEPT
{
sampleCount = sampleCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CoarseSampleOrderCustomNV & setSampleLocationCount( uint32_t sampleLocationCount_ ) VULKAN_HPP_NOEXCEPT
{
sampleLocationCount = sampleLocationCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CoarseSampleOrderCustomNV & setPSampleLocations( const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV * pSampleLocations_ ) VULKAN_HPP_NOEXCEPT
{
pSampleLocations = pSampleLocations_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
CoarseSampleOrderCustomNV & setSampleLocations( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV> const & sampleLocations_ ) VULKAN_HPP_NOEXCEPT
{
sampleLocationCount = static_cast<uint32_t>( sampleLocations_.size() );
pSampleLocations = sampleLocations_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkCoarseSampleOrderCustomNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCoarseSampleOrderCustomNV*>( this );
}
operator VkCoarseSampleOrderCustomNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCoarseSampleOrderCustomNV*>( this );
}
operator VkCoarseSampleOrderCustomNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkCoarseSampleOrderCustomNV*>( this );
}
operator VkCoarseSampleOrderCustomNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkCoarseSampleOrderCustomNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV const &, uint32_t const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( shadingRate, sampleCount, sampleLocationCount, pSampleLocations );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( CoarseSampleOrderCustomNV const & ) const = default;
#else
bool operator==( CoarseSampleOrderCustomNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( shadingRate == rhs.shadingRate )
&& ( sampleCount == rhs.sampleCount )
&& ( sampleLocationCount == rhs.sampleLocationCount )
&& ( pSampleLocations == rhs.pSampleLocations );
#endif
}
bool operator!=( CoarseSampleOrderCustomNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate = VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV::eNoInvocations;
uint32_t sampleCount = {};
uint32_t sampleLocationCount = {};
const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV * pSampleLocations = {};
};
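// Example usage (illustrative; with enhanced mode available, the ArrayProxy constructor
// derives sampleLocationCount from the container; the palette entry is an assumed value
// from the NV shading rate image extension):
//
//   std::array<vk::CoarseSampleLocationNV, 2> locations = { vk::CoarseSampleLocationNV{ 0, 0, 0 },
//                                                           vk::CoarseSampleLocationNV{ 1, 0, 0 } };
//   vk::CoarseSampleOrderCustomNV order{ vk::ShadingRatePaletteEntryNV::e1InvocationPer2X1Pixels, 1, locations };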
// wrapper struct for struct VkColorBlendAdvancedEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkColorBlendAdvancedEXT.html
struct ColorBlendAdvancedEXT
{
using NativeType = VkColorBlendAdvancedEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ColorBlendAdvancedEXT(VULKAN_HPP_NAMESPACE::BlendOp advancedBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd, VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied_ = {}, VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ = {}, VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_ = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated, VULKAN_HPP_NAMESPACE::Bool32 clampResults_ = {}) VULKAN_HPP_NOEXCEPT
: advancedBlendOp{ advancedBlendOp_ }, srcPremultiplied{ srcPremultiplied_ }, dstPremultiplied{ dstPremultiplied_ }, blendOverlap{ blendOverlap_ }, clampResults{ clampResults_ }
{}
VULKAN_HPP_CONSTEXPR ColorBlendAdvancedEXT( ColorBlendAdvancedEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ColorBlendAdvancedEXT( VkColorBlendAdvancedEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: ColorBlendAdvancedEXT( *reinterpret_cast<ColorBlendAdvancedEXT const *>( &rhs ) )
{}
ColorBlendAdvancedEXT & operator=( ColorBlendAdvancedEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ColorBlendAdvancedEXT & operator=( VkColorBlendAdvancedEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ColorBlendAdvancedEXT & setAdvancedBlendOp( VULKAN_HPP_NAMESPACE::BlendOp advancedBlendOp_ ) VULKAN_HPP_NOEXCEPT
{
advancedBlendOp = advancedBlendOp_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ColorBlendAdvancedEXT & setSrcPremultiplied( VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied_ ) VULKAN_HPP_NOEXCEPT
{
srcPremultiplied = srcPremultiplied_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ColorBlendAdvancedEXT & setDstPremultiplied( VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ ) VULKAN_HPP_NOEXCEPT
{
dstPremultiplied = dstPremultiplied_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ColorBlendAdvancedEXT & setBlendOverlap( VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_ ) VULKAN_HPP_NOEXCEPT
{
blendOverlap = blendOverlap_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ColorBlendAdvancedEXT & setClampResults( VULKAN_HPP_NAMESPACE::Bool32 clampResults_ ) VULKAN_HPP_NOEXCEPT
{
clampResults = clampResults_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkColorBlendAdvancedEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkColorBlendAdvancedEXT*>( this );
}
operator VkColorBlendAdvancedEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkColorBlendAdvancedEXT*>( this );
}
operator VkColorBlendAdvancedEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkColorBlendAdvancedEXT*>( this );
}
operator VkColorBlendAdvancedEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkColorBlendAdvancedEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::BlendOp const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::BlendOverlapEXT const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( advancedBlendOp, srcPremultiplied, dstPremultiplied, blendOverlap, clampResults );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ColorBlendAdvancedEXT const & ) const = default;
#else
bool operator==( ColorBlendAdvancedEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( advancedBlendOp == rhs.advancedBlendOp )
&& ( srcPremultiplied == rhs.srcPremultiplied )
&& ( dstPremultiplied == rhs.dstPremultiplied )
&& ( blendOverlap == rhs.blendOverlap )
&& ( clampResults == rhs.clampResults );
#endif
}
bool operator!=( ColorBlendAdvancedEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::BlendOp advancedBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd;
VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied = {};
VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied = {};
VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated;
VULKAN_HPP_NAMESPACE::Bool32 clampResults = {};
};
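// Example usage (illustrative; typically passed per attachment to
// CommandBuffer::setColorBlendAdvancedEXT; eMultiplyEXT is an assumed advanced blend op
// from VK_EXT_blend_operation_advanced):
//
//   vk::ColorBlendAdvancedEXT advanced{ vk::BlendOp::eMultiplyEXT, VK_TRUE, VK_TRUE,
//                                       vk::BlendOverlapEXT::eUncorrelated, VK_FALSE };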
// wrapper struct for struct VkColorBlendEquationEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkColorBlendEquationEXT.html
struct ColorBlendEquationEXT
{
using NativeType = VkColorBlendEquationEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ColorBlendEquationEXT(VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd, VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd) VULKAN_HPP_NOEXCEPT
: srcColorBlendFactor{ srcColorBlendFactor_ }, dstColorBlendFactor{ dstColorBlendFactor_ }, colorBlendOp{ colorBlendOp_ }, srcAlphaBlendFactor{ srcAlphaBlendFactor_ }, dstAlphaBlendFactor{ dstAlphaBlendFactor_ }, alphaBlendOp{ alphaBlendOp_ }
{}
VULKAN_HPP_CONSTEXPR ColorBlendEquationEXT( ColorBlendEquationEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ColorBlendEquationEXT( VkColorBlendEquationEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: ColorBlendEquationEXT( *reinterpret_cast<ColorBlendEquationEXT const *>( &rhs ) )
{}
ColorBlendEquationEXT & operator=( ColorBlendEquationEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ColorBlendEquationEXT & operator=( VkColorBlendEquationEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ColorBlendEquationEXT & setSrcColorBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT
{
srcColorBlendFactor = srcColorBlendFactor_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ColorBlendEquationEXT & setDstColorBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT
{
dstColorBlendFactor = dstColorBlendFactor_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ColorBlendEquationEXT & setColorBlendOp( VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ ) VULKAN_HPP_NOEXCEPT
{
colorBlendOp = colorBlendOp_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ColorBlendEquationEXT & setSrcAlphaBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT
{
srcAlphaBlendFactor = srcAlphaBlendFactor_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ColorBlendEquationEXT & setDstAlphaBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT
{
dstAlphaBlendFactor = dstAlphaBlendFactor_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ColorBlendEquationEXT & setAlphaBlendOp( VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ ) VULKAN_HPP_NOEXCEPT
{
alphaBlendOp = alphaBlendOp_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkColorBlendEquationEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkColorBlendEquationEXT*>( this );
}
operator VkColorBlendEquationEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkColorBlendEquationEXT*>( this );
}
operator VkColorBlendEquationEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkColorBlendEquationEXT*>( this );
}
operator VkColorBlendEquationEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkColorBlendEquationEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::BlendFactor const &, VULKAN_HPP_NAMESPACE::BlendFactor const &, VULKAN_HPP_NAMESPACE::BlendOp const &, VULKAN_HPP_NAMESPACE::BlendFactor const &, VULKAN_HPP_NAMESPACE::BlendFactor const &, VULKAN_HPP_NAMESPACE::BlendOp const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( srcColorBlendFactor, dstColorBlendFactor, colorBlendOp, srcAlphaBlendFactor, dstAlphaBlendFactor, alphaBlendOp );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ColorBlendEquationEXT const & ) const = default;
#else
bool operator==( ColorBlendEquationEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( srcColorBlendFactor == rhs.srcColorBlendFactor )
&& ( dstColorBlendFactor == rhs.dstColorBlendFactor )
&& ( colorBlendOp == rhs.colorBlendOp )
&& ( srcAlphaBlendFactor == rhs.srcAlphaBlendFactor )
&& ( dstAlphaBlendFactor == rhs.dstAlphaBlendFactor )
&& ( alphaBlendOp == rhs.alphaBlendOp );
#endif
}
bool operator!=( ColorBlendEquationEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd;
VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd;
};
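// Example usage (illustrative): classic "source over" alpha blending, e.g. for
// CommandBuffer::setColorBlendEquationEXT:
//
//   vk::ColorBlendEquationEXT alphaBlend{ vk::BlendFactor::eSrcAlpha,
//                                         vk::BlendFactor::eOneMinusSrcAlpha,
//                                         vk::BlendOp::eAdd,
//                                         vk::BlendFactor::eOne,
//                                         vk::BlendFactor::eOneMinusSrcAlpha,
//                                         vk::BlendOp::eAdd };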
// wrapper struct for struct VkCommandBufferAllocateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCommandBufferAllocateInfo.html
struct CommandBufferAllocateInfo
{
using NativeType = VkCommandBufferAllocateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferAllocateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR CommandBufferAllocateInfo(VULKAN_HPP_NAMESPACE::CommandPool commandPool_ = {}, VULKAN_HPP_NAMESPACE::CommandBufferLevel level_ = VULKAN_HPP_NAMESPACE::CommandBufferLevel::ePrimary, uint32_t commandBufferCount_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, commandPool{ commandPool_ }, level{ level_ }, commandBufferCount{ commandBufferCount_ }
{}
VULKAN_HPP_CONSTEXPR CommandBufferAllocateInfo( CommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CommandBufferAllocateInfo( VkCommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: CommandBufferAllocateInfo( *reinterpret_cast<CommandBufferAllocateInfo const *>( &rhs ) )
{}
CommandBufferAllocateInfo & operator=( CommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
CommandBufferAllocateInfo & operator=( VkCommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 CommandBufferAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CommandBufferAllocateInfo & setCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool_ ) VULKAN_HPP_NOEXCEPT
{
commandPool = commandPool_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CommandBufferAllocateInfo & setLevel( VULKAN_HPP_NAMESPACE::CommandBufferLevel level_ ) VULKAN_HPP_NOEXCEPT
{
level = level_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CommandBufferAllocateInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT
{
commandBufferCount = commandBufferCount_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkCommandBufferAllocateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCommandBufferAllocateInfo*>( this );
}
operator VkCommandBufferAllocateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCommandBufferAllocateInfo*>( this );
}
operator VkCommandBufferAllocateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkCommandBufferAllocateInfo*>( this );
}
operator VkCommandBufferAllocateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkCommandBufferAllocateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::CommandPool const &, VULKAN_HPP_NAMESPACE::CommandBufferLevel const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, commandPool, level, commandBufferCount );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( CommandBufferAllocateInfo const & ) const = default;
#else
bool operator==( CommandBufferAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( commandPool == rhs.commandPool )
&& ( level == rhs.level )
&& ( commandBufferCount == rhs.commandBufferCount );
#endif
}
bool operator!=( CommandBufferAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferAllocateInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::CommandPool commandPool = {};
VULKAN_HPP_NAMESPACE::CommandBufferLevel level = VULKAN_HPP_NAMESPACE::CommandBufferLevel::ePrimary;
uint32_t commandBufferCount = {};
};
template <>
struct CppType<StructureType, StructureType::eCommandBufferAllocateInfo>
{
using Type = CommandBufferAllocateInfo;
};
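// Example usage (illustrative; assumes a created vk::Device `device`, a vk::CommandPool
// `pool`, and the default exception-enabled configuration):
//
//   vk::CommandBufferAllocateInfo allocInfo{ pool, vk::CommandBufferLevel::ePrimary, 1 };
//   std::vector<vk::CommandBuffer> commandBuffers = device.allocateCommandBuffers( allocInfo );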
// wrapper struct for struct VkCommandBufferInheritanceInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCommandBufferInheritanceInfo.html
struct CommandBufferInheritanceInfo
{
using NativeType = VkCommandBufferInheritanceInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR CommandBufferInheritanceInfo(VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, uint32_t subpass_ = {}, VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ = {}, VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable_ = {}, VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags_ = {}, VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, renderPass{ renderPass_ }, subpass{ subpass_ }, framebuffer{ framebuffer_ }, occlusionQueryEnable{ occlusionQueryEnable_ }, queryFlags{ queryFlags_ }, pipelineStatistics{ pipelineStatistics_ }
{}
VULKAN_HPP_CONSTEXPR CommandBufferInheritanceInfo( CommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CommandBufferInheritanceInfo( VkCommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: CommandBufferInheritanceInfo( *reinterpret_cast<CommandBufferInheritanceInfo const *>( &rhs ) )
{}
CommandBufferInheritanceInfo & operator=( CommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
CommandBufferInheritanceInfo & operator=( VkCommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
{
renderPass = renderPass_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setSubpass( uint32_t subpass_ ) VULKAN_HPP_NOEXCEPT
{
subpass = subpass_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ ) VULKAN_HPP_NOEXCEPT
{
framebuffer = framebuffer_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setOcclusionQueryEnable( VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable_ ) VULKAN_HPP_NOEXCEPT
{
occlusionQueryEnable = occlusionQueryEnable_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setQueryFlags( VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags_ ) VULKAN_HPP_NOEXCEPT
{
queryFlags = queryFlags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setPipelineStatistics( VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ ) VULKAN_HPP_NOEXCEPT
{
pipelineStatistics = pipelineStatistics_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkCommandBufferInheritanceInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCommandBufferInheritanceInfo*>( this );
}
operator VkCommandBufferInheritanceInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCommandBufferInheritanceInfo*>( this );
}
operator VkCommandBufferInheritanceInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkCommandBufferInheritanceInfo*>( this );
}
operator VkCommandBufferInheritanceInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkCommandBufferInheritanceInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::RenderPass const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Framebuffer const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::QueryControlFlags const &, VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, renderPass, subpass, framebuffer, occlusionQueryEnable, queryFlags, pipelineStatistics );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( CommandBufferInheritanceInfo const & ) const = default;
#else
bool operator==( CommandBufferInheritanceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( renderPass == rhs.renderPass )
&& ( subpass == rhs.subpass )
&& ( framebuffer == rhs.framebuffer )
&& ( occlusionQueryEnable == rhs.occlusionQueryEnable )
&& ( queryFlags == rhs.queryFlags )
&& ( pipelineStatistics == rhs.pipelineStatistics );
#endif
}
bool operator!=( CommandBufferInheritanceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::RenderPass renderPass = {};
uint32_t subpass = {};
VULKAN_HPP_NAMESPACE::Framebuffer framebuffer = {};
VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable = {};
VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags = {};
VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics = {};
};
template <>
struct CppType<StructureType, StructureType::eCommandBufferInheritanceInfo>
{
using Type = CommandBufferInheritanceInfo;
};
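// Usage sketch (illustrative, assuming the default "vk" namespace alias and handles
// `renderPass` / `secondaryCommandBuffer` created elsewhere): inheritance state for a
// secondary command buffer that will execute inside subpass 0 of that render pass.
//   vk::CommandBufferInheritanceInfo inheritanceInfo{};
//   inheritanceInfo.setRenderPass( renderPass ).setSubpass( 0 );   // framebuffer may stay null if not known yet
//   vk::CommandBufferBeginInfo beginInfo{ vk::CommandBufferUsageFlagBits::eRenderPassContinue, &inheritanceInfo };
//   secondaryCommandBuffer.begin( beginInfo );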
// wrapper struct for struct VkCommandBufferBeginInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCommandBufferBeginInfo.html
struct CommandBufferBeginInfo
{
using NativeType = VkCommandBufferBeginInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferBeginInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR CommandBufferBeginInfo(VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags_ = {}, const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo * pInheritanceInfo_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, pInheritanceInfo{ pInheritanceInfo_ }
{}
VULKAN_HPP_CONSTEXPR CommandBufferBeginInfo( CommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CommandBufferBeginInfo( VkCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: CommandBufferBeginInfo( *reinterpret_cast<CommandBufferBeginInfo const *>( &rhs ) )
{}
CommandBufferBeginInfo & operator=( CommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
CommandBufferBeginInfo & operator=( VkCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 CommandBufferBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CommandBufferBeginInfo & setFlags( VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CommandBufferBeginInfo & setPInheritanceInfo( const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo * pInheritanceInfo_ ) VULKAN_HPP_NOEXCEPT
{
pInheritanceInfo = pInheritanceInfo_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkCommandBufferBeginInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCommandBufferBeginInfo*>( this );
}
operator VkCommandBufferBeginInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCommandBufferBeginInfo*>( this );
}
operator VkCommandBufferBeginInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkCommandBufferBeginInfo*>( this );
}
operator VkCommandBufferBeginInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkCommandBufferBeginInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags const &, const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, pInheritanceInfo );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( CommandBufferBeginInfo const & ) const = default;
#else
bool operator==( CommandBufferBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( pInheritanceInfo == rhs.pInheritanceInfo );
#endif
}
bool operator!=( CommandBufferBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferBeginInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags = {};
const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo * pInheritanceInfo = {};
};
template <>
struct CppType<StructureType, StructureType::eCommandBufferBeginInfo>
{
using Type = CommandBufferBeginInfo;
};
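// Usage sketch (illustrative, assuming a valid vk::CommandBuffer `commandBuffer`
// allocated elsewhere): recording a primary command buffer for a single submission.
//   vk::CommandBufferBeginInfo beginInfo{};
//   beginInfo.setFlags( vk::CommandBufferUsageFlagBits::eOneTimeSubmit );
//   commandBuffer.begin( beginInfo );
//   // ... record commands ...
//   commandBuffer.end();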
// wrapper struct for struct VkCommandBufferInheritanceConditionalRenderingInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCommandBufferInheritanceConditionalRenderingInfoEXT.html
struct CommandBufferInheritanceConditionalRenderingInfoEXT
{
using NativeType = VkCommandBufferInheritanceConditionalRenderingInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR CommandBufferInheritanceConditionalRenderingInfoEXT(VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, conditionalRenderingEnable{ conditionalRenderingEnable_ }
{}
VULKAN_HPP_CONSTEXPR CommandBufferInheritanceConditionalRenderingInfoEXT( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CommandBufferInheritanceConditionalRenderingInfoEXT( VkCommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: CommandBufferInheritanceConditionalRenderingInfoEXT( *reinterpret_cast<CommandBufferInheritanceConditionalRenderingInfoEXT const *>( &rhs ) )
{}
CommandBufferInheritanceConditionalRenderingInfoEXT & operator=( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
CommandBufferInheritanceConditionalRenderingInfoEXT & operator=( VkCommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceConditionalRenderingInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceConditionalRenderingInfoEXT & setConditionalRenderingEnable( VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable_ ) VULKAN_HPP_NOEXCEPT
{
conditionalRenderingEnable = conditionalRenderingEnable_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkCommandBufferInheritanceConditionalRenderingInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCommandBufferInheritanceConditionalRenderingInfoEXT*>( this );
}
operator VkCommandBufferInheritanceConditionalRenderingInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCommandBufferInheritanceConditionalRenderingInfoEXT*>( this );
}
operator VkCommandBufferInheritanceConditionalRenderingInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkCommandBufferInheritanceConditionalRenderingInfoEXT*>( this );
}
operator VkCommandBufferInheritanceConditionalRenderingInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkCommandBufferInheritanceConditionalRenderingInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, conditionalRenderingEnable );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( CommandBufferInheritanceConditionalRenderingInfoEXT const & ) const = default;
#else
bool operator==( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( conditionalRenderingEnable == rhs.conditionalRenderingEnable );
#endif
}
bool operator!=( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable = {};
};
template <>
struct CppType<StructureType, StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT>
{
using Type = CommandBufferInheritanceConditionalRenderingInfoEXT;
};
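// Usage sketch (VK_EXT_conditional_rendering; illustrative): chained into the pNext of
// CommandBufferInheritanceInfo so the secondary command buffer may be executed while
// conditional rendering is active on the primary.
//   vk::CommandBufferInheritanceConditionalRenderingInfoEXT conditionalInfo{ VK_TRUE };
//   vk::CommandBufferInheritanceInfo inheritanceInfo{};
//   inheritanceInfo.setPNext( &conditionalInfo );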
// wrapper struct for struct VkCommandBufferInheritanceRenderPassTransformInfoQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCommandBufferInheritanceRenderPassTransformInfoQCOM.html
struct CommandBufferInheritanceRenderPassTransformInfoQCOM
{
using NativeType = VkCommandBufferInheritanceRenderPassTransformInfoQCOM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR CommandBufferInheritanceRenderPassTransformInfoQCOM(VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, transform{ transform_ }, renderArea{ renderArea_ }
{}
VULKAN_HPP_CONSTEXPR CommandBufferInheritanceRenderPassTransformInfoQCOM( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CommandBufferInheritanceRenderPassTransformInfoQCOM( VkCommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
: CommandBufferInheritanceRenderPassTransformInfoQCOM( *reinterpret_cast<CommandBufferInheritanceRenderPassTransformInfoQCOM const *>( &rhs ) )
{}
CommandBufferInheritanceRenderPassTransformInfoQCOM & operator=( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
CommandBufferInheritanceRenderPassTransformInfoQCOM & operator=( VkCommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderPassTransformInfoQCOM const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderPassTransformInfoQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderPassTransformInfoQCOM & setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT
{
transform = transform_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderPassTransformInfoQCOM & setRenderArea( VULKAN_HPP_NAMESPACE::Rect2D const & renderArea_ ) VULKAN_HPP_NOEXCEPT
{
renderArea = renderArea_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkCommandBufferInheritanceRenderPassTransformInfoQCOM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCommandBufferInheritanceRenderPassTransformInfoQCOM*>( this );
}
operator VkCommandBufferInheritanceRenderPassTransformInfoQCOM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCommandBufferInheritanceRenderPassTransformInfoQCOM*>( this );
}
operator VkCommandBufferInheritanceRenderPassTransformInfoQCOM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkCommandBufferInheritanceRenderPassTransformInfoQCOM*>( this );
}
operator VkCommandBufferInheritanceRenderPassTransformInfoQCOM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkCommandBufferInheritanceRenderPassTransformInfoQCOM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR const &, VULKAN_HPP_NAMESPACE::Rect2D const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, transform, renderArea );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( CommandBufferInheritanceRenderPassTransformInfoQCOM const & ) const = default;
#else
bool operator==( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( transform == rhs.transform )
&& ( renderArea == rhs.renderArea );
#endif
}
bool operator!=( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM;
void * pNext = {};
VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
VULKAN_HPP_NAMESPACE::Rect2D renderArea = {};
};
template <>
struct CppType<StructureType, StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM>
{
using Type = CommandBufferInheritanceRenderPassTransformInfoQCOM;
};
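// Usage sketch (VK_QCOM_render_pass_transform; values are illustrative): tells a
// secondary command buffer which pre-rotation transform and render area the primary's
// render pass instance was begun with; chained into CommandBufferInheritanceInfo::pNext.
//   vk::CommandBufferInheritanceRenderPassTransformInfoQCOM transformInfo{};
//   transformInfo.setTransform( vk::SurfaceTransformFlagBitsKHR::eRotate90 )
//                .setRenderArea( vk::Rect2D{ vk::Offset2D{ 0, 0 }, vk::Extent2D{ 1080, 1920 } } );
//   inheritanceInfo.setPNext( &transformInfo );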
// wrapper struct for struct VkCommandBufferInheritanceRenderingInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCommandBufferInheritanceRenderingInfo.html
struct CommandBufferInheritanceRenderingInfo
{
using NativeType = VkCommandBufferInheritanceRenderingInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceRenderingInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR CommandBufferInheritanceRenderingInfo(VULKAN_HPP_NAMESPACE::RenderingFlags flags_ = {}, uint32_t viewMask_ = {}, uint32_t colorAttachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats_ = {}, VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, viewMask{ viewMask_ }, colorAttachmentCount{ colorAttachmentCount_ }, pColorAttachmentFormats{ pColorAttachmentFormats_ }, depthAttachmentFormat{ depthAttachmentFormat_ }, stencilAttachmentFormat{ stencilAttachmentFormat_ }, rasterizationSamples{ rasterizationSamples_ }
{}
VULKAN_HPP_CONSTEXPR CommandBufferInheritanceRenderingInfo( CommandBufferInheritanceRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CommandBufferInheritanceRenderingInfo( VkCommandBufferInheritanceRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: CommandBufferInheritanceRenderingInfo( *reinterpret_cast<CommandBufferInheritanceRenderingInfo const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
CommandBufferInheritanceRenderingInfo( VULKAN_HPP_NAMESPACE::RenderingFlags flags_, uint32_t viewMask_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & colorAttachmentFormats_, VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, const void * pNext_ = nullptr )
: pNext( pNext_ ), flags( flags_ ), viewMask( viewMask_ ), colorAttachmentCount( static_cast<uint32_t>( colorAttachmentFormats_.size() ) ), pColorAttachmentFormats( colorAttachmentFormats_.data() ), depthAttachmentFormat( depthAttachmentFormat_ ), stencilAttachmentFormat( stencilAttachmentFormat_ ), rasterizationSamples( rasterizationSamples_ )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
CommandBufferInheritanceRenderingInfo & operator=( CommandBufferInheritanceRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
CommandBufferInheritanceRenderingInfo & operator=( VkCommandBufferInheritanceRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderingInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setFlags( VULKAN_HPP_NAMESPACE::RenderingFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT
{
viewMask = viewMask_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
{
colorAttachmentCount = colorAttachmentCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setPColorAttachmentFormats( const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT
{
pColorAttachmentFormats = pColorAttachmentFormats_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
CommandBufferInheritanceRenderingInfo & setColorAttachmentFormats( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & colorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT
{
colorAttachmentCount = static_cast<uint32_t>( colorAttachmentFormats_.size() );
pColorAttachmentFormats = colorAttachmentFormats_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setDepthAttachmentFormat( VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT
{
depthAttachmentFormat = depthAttachmentFormat_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setStencilAttachmentFormat( VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT
{
stencilAttachmentFormat = stencilAttachmentFormat_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setRasterizationSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ ) VULKAN_HPP_NOEXCEPT
{
rasterizationSamples = rasterizationSamples_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkCommandBufferInheritanceRenderingInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCommandBufferInheritanceRenderingInfo*>( this );
}
operator VkCommandBufferInheritanceRenderingInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCommandBufferInheritanceRenderingInfo*>( this );
}
operator VkCommandBufferInheritanceRenderingInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkCommandBufferInheritanceRenderingInfo*>( this );
}
operator VkCommandBufferInheritanceRenderingInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkCommandBufferInheritanceRenderingInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::RenderingFlags const &, uint32_t const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Format * const &, VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::SampleCountFlagBits const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, viewMask, colorAttachmentCount, pColorAttachmentFormats, depthAttachmentFormat, stencilAttachmentFormat, rasterizationSamples );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( CommandBufferInheritanceRenderingInfo const & ) const = default;
#else
bool operator==( CommandBufferInheritanceRenderingInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( viewMask == rhs.viewMask )
&& ( colorAttachmentCount == rhs.colorAttachmentCount )
&& ( pColorAttachmentFormats == rhs.pColorAttachmentFormats )
&& ( depthAttachmentFormat == rhs.depthAttachmentFormat )
&& ( stencilAttachmentFormat == rhs.stencilAttachmentFormat )
&& ( rasterizationSamples == rhs.rasterizationSamples );
#endif
}
bool operator!=( CommandBufferInheritanceRenderingInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceRenderingInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::RenderingFlags flags = {};
uint32_t viewMask = {};
uint32_t colorAttachmentCount = {};
const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats = {};
VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
};
template <>
struct CppType<StructureType, StructureType::eCommandBufferInheritanceRenderingInfo>
{
using Type = CommandBufferInheritanceRenderingInfo;
};
using CommandBufferInheritanceRenderingInfoKHR = CommandBufferInheritanceRenderingInfo;
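// Usage sketch (dynamic rendering, core 1.3; formats are illustrative): gives a
// secondary command buffer recorded with eRenderPassContinue the attachment formats
// and sample count of the vkCmdBeginRendering scope it will execute inside; chained
// into CommandBufferInheritanceInfo::pNext while its renderPass member stays null.
//   vk::Format colorFormat = vk::Format::eB8G8R8A8Unorm;
//   vk::CommandBufferInheritanceRenderingInfo renderingInheritance{};
//   renderingInheritance.setColorAttachmentFormats( colorFormat )   // ArrayProxy accepts a single lvalue
//                       .setDepthAttachmentFormat( vk::Format::eD32Sfloat )
//                       .setRasterizationSamples( vk::SampleCountFlagBits::e1 );
//   inheritanceInfo.setPNext( &renderingInheritance );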
// wrapper struct for struct VkViewport, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkViewport.html
struct Viewport
{
using NativeType = VkViewport;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR Viewport(float x_ = {}, float y_ = {}, float width_ = {}, float height_ = {}, float minDepth_ = {}, float maxDepth_ = {}) VULKAN_HPP_NOEXCEPT
: x{ x_ }, y{ y_ }, width{ width_ }, height{ height_ }, minDepth{ minDepth_ }, maxDepth{ maxDepth_ }
{}
VULKAN_HPP_CONSTEXPR Viewport( Viewport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
Viewport( VkViewport const & rhs ) VULKAN_HPP_NOEXCEPT
: Viewport( *reinterpret_cast<Viewport const *>( &rhs ) )
{}
Viewport & operator=( Viewport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
Viewport & operator=( VkViewport const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Viewport const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 Viewport & setX( float x_ ) VULKAN_HPP_NOEXCEPT
{
x = x_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 Viewport & setY( float y_ ) VULKAN_HPP_NOEXCEPT
{
y = y_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 Viewport & setWidth( float width_ ) VULKAN_HPP_NOEXCEPT
{
width = width_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 Viewport & setHeight( float height_ ) VULKAN_HPP_NOEXCEPT
{
height = height_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 Viewport & setMinDepth( float minDepth_ ) VULKAN_HPP_NOEXCEPT
{
minDepth = minDepth_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 Viewport & setMaxDepth( float maxDepth_ ) VULKAN_HPP_NOEXCEPT
{
maxDepth = maxDepth_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkViewport const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkViewport*>( this );
}
operator VkViewport &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkViewport*>( this );
}
operator VkViewport const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkViewport*>( this );
}
operator VkViewport *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkViewport*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<float const &, float const &, float const &, float const &, float const &, float const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( x, y, width, height, minDepth, maxDepth );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( Viewport const & ) const = default;
#else
bool operator==( Viewport const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( x == rhs.x )
&& ( y == rhs.y )
&& ( width == rhs.width )
&& ( height == rhs.height )
&& ( minDepth == rhs.minDepth )
&& ( maxDepth == rhs.maxDepth );
#endif
}
bool operator!=( Viewport const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
float x = {};
float y = {};
float width = {};
float height = {};
float minDepth = {};
float maxDepth = {};
};
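// Usage sketch (values are illustrative): a full-framebuffer viewport with the standard
// [0, 1] depth range; width and height are floats, and a negative height can be used to
// flip the y axis.
//   vk::Viewport viewport{ 0.0f, 0.0f, 1920.0f, 1080.0f, 0.0f, 1.0f };
//   commandBuffer.setViewport( 0, viewport );   // first viewport index 0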
// wrapper struct for struct VkCommandBufferInheritanceViewportScissorInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCommandBufferInheritanceViewportScissorInfoNV.html
struct CommandBufferInheritanceViewportScissorInfoNV
{
using NativeType = VkCommandBufferInheritanceViewportScissorInfoNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceViewportScissorInfoNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR CommandBufferInheritanceViewportScissorInfoNV(VULKAN_HPP_NAMESPACE::Bool32 viewportScissor2D_ = {}, uint32_t viewportDepthCount_ = {}, const VULKAN_HPP_NAMESPACE::Viewport * pViewportDepths_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, viewportScissor2D{ viewportScissor2D_ }, viewportDepthCount{ viewportDepthCount_ }, pViewportDepths{ pViewportDepths_ }
{}
VULKAN_HPP_CONSTEXPR CommandBufferInheritanceViewportScissorInfoNV( CommandBufferInheritanceViewportScissorInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CommandBufferInheritanceViewportScissorInfoNV( VkCommandBufferInheritanceViewportScissorInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: CommandBufferInheritanceViewportScissorInfoNV( *reinterpret_cast<CommandBufferInheritanceViewportScissorInfoNV const *>( &rhs ) )
{}
CommandBufferInheritanceViewportScissorInfoNV & operator=( CommandBufferInheritanceViewportScissorInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
CommandBufferInheritanceViewportScissorInfoNV & operator=( VkCommandBufferInheritanceViewportScissorInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceViewportScissorInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceViewportScissorInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceViewportScissorInfoNV & setViewportScissor2D( VULKAN_HPP_NAMESPACE::Bool32 viewportScissor2D_ ) VULKAN_HPP_NOEXCEPT
{
viewportScissor2D = viewportScissor2D_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceViewportScissorInfoNV & setViewportDepthCount( uint32_t viewportDepthCount_ ) VULKAN_HPP_NOEXCEPT
{
viewportDepthCount = viewportDepthCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceViewportScissorInfoNV & setPViewportDepths( const VULKAN_HPP_NAMESPACE::Viewport * pViewportDepths_ ) VULKAN_HPP_NOEXCEPT
{
pViewportDepths = pViewportDepths_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkCommandBufferInheritanceViewportScissorInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCommandBufferInheritanceViewportScissorInfoNV*>( this );
}
operator VkCommandBufferInheritanceViewportScissorInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCommandBufferInheritanceViewportScissorInfoNV*>( this );
}
operator VkCommandBufferInheritanceViewportScissorInfoNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkCommandBufferInheritanceViewportScissorInfoNV*>( this );
}
operator VkCommandBufferInheritanceViewportScissorInfoNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkCommandBufferInheritanceViewportScissorInfoNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Viewport * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, viewportScissor2D, viewportDepthCount, pViewportDepths );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( CommandBufferInheritanceViewportScissorInfoNV const & ) const = default;
#else
bool operator==( CommandBufferInheritanceViewportScissorInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( viewportScissor2D == rhs.viewportScissor2D )
&& ( viewportDepthCount == rhs.viewportDepthCount )
&& ( pViewportDepths == rhs.pViewportDepths );
#endif
}
bool operator!=( CommandBufferInheritanceViewportScissorInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceViewportScissorInfoNV;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 viewportScissor2D = {};
uint32_t viewportDepthCount = {};
const VULKAN_HPP_NAMESPACE::Viewport * pViewportDepths = {};
};
template <>
struct CppType<StructureType, StructureType::eCommandBufferInheritanceViewportScissorInfoNV>
{
using Type = CommandBufferInheritanceViewportScissorInfoNV;
};
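// Usage sketch (VK_NV_inherited_viewport_scissor; illustrative): lets a secondary
// command buffer inherit viewport/scissor state set on the primary; the expected depth
// range of each inherited viewport is listed up front via pViewportDepths.
//   vk::Viewport expectedDepth{ 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 1.0f };
//   vk::CommandBufferInheritanceViewportScissorInfoNV viewportScissorInfo{ VK_TRUE, 1, &expectedDepth };
//   inheritanceInfo.setPNext( &viewportScissorInfo );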
// wrapper struct for struct VkCommandBufferSubmitInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCommandBufferSubmitInfo.html
struct CommandBufferSubmitInfo
{
using NativeType = VkCommandBufferSubmitInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferSubmitInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR CommandBufferSubmitInfo(VULKAN_HPP_NAMESPACE::CommandBuffer commandBuffer_ = {}, uint32_t deviceMask_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, commandBuffer{ commandBuffer_ }, deviceMask{ deviceMask_ }
{}
VULKAN_HPP_CONSTEXPR CommandBufferSubmitInfo( CommandBufferSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CommandBufferSubmitInfo( VkCommandBufferSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: CommandBufferSubmitInfo( *reinterpret_cast<CommandBufferSubmitInfo const *>( &rhs ) )
{}
CommandBufferSubmitInfo & operator=( CommandBufferSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
CommandBufferSubmitInfo & operator=( VkCommandBufferSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 CommandBufferSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CommandBufferSubmitInfo & setCommandBuffer( VULKAN_HPP_NAMESPACE::CommandBuffer commandBuffer_ ) VULKAN_HPP_NOEXCEPT
{
commandBuffer = commandBuffer_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CommandBufferSubmitInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
{
deviceMask = deviceMask_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkCommandBufferSubmitInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCommandBufferSubmitInfo*>( this );
}
operator VkCommandBufferSubmitInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCommandBufferSubmitInfo*>( this );
}
operator VkCommandBufferSubmitInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkCommandBufferSubmitInfo*>( this );
}
operator VkCommandBufferSubmitInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkCommandBufferSubmitInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::CommandBuffer const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, commandBuffer, deviceMask );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( CommandBufferSubmitInfo const & ) const = default;
#else
bool operator==( CommandBufferSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( commandBuffer == rhs.commandBuffer )
&& ( deviceMask == rhs.deviceMask );
#endif
}
bool operator!=( CommandBufferSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferSubmitInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::CommandBuffer commandBuffer = {};
uint32_t deviceMask = {};
};
template <>
struct CppType<StructureType, StructureType::eCommandBufferSubmitInfo>
{
using Type = CommandBufferSubmitInfo;
};
using CommandBufferSubmitInfoKHR = CommandBufferSubmitInfo;
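// Usage sketch (synchronization2; assuming valid `commandBuffer` and `queue` handles
// created elsewhere): wrapping a command buffer for vkQueueSubmit2; a deviceMask of 0
// means all devices in the device group execute it.
//   vk::CommandBufferSubmitInfo cmdSubmitInfo{ commandBuffer };
//   vk::SubmitInfo2 submitInfo{};
//   submitInfo.setCommandBufferInfos( cmdSubmitInfo );
//   queue.submit2( submitInfo );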
// wrapper struct for struct VkCommandPoolCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCommandPoolCreateInfo.html
struct CommandPoolCreateInfo
{
using NativeType = VkCommandPoolCreateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandPoolCreateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR CommandPoolCreateInfo(VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags_ = {}, uint32_t queueFamilyIndex_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, queueFamilyIndex{ queueFamilyIndex_ }
{}
VULKAN_HPP_CONSTEXPR CommandPoolCreateInfo( CommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CommandPoolCreateInfo( VkCommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: CommandPoolCreateInfo( *reinterpret_cast<CommandPoolCreateInfo const *>( &rhs ) )
{}
CommandPoolCreateInfo & operator=( CommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
CommandPoolCreateInfo & operator=( VkCommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 CommandPoolCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CommandPoolCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CommandPoolCreateInfo & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
{
queueFamilyIndex = queueFamilyIndex_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkCommandPoolCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCommandPoolCreateInfo*>( this );
}
operator VkCommandPoolCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCommandPoolCreateInfo*>( this );
}
operator VkCommandPoolCreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkCommandPoolCreateInfo*>( this );
}
operator VkCommandPoolCreateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkCommandPoolCreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, queueFamilyIndex );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( CommandPoolCreateInfo const & ) const = default;
#else
bool operator==( CommandPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( queueFamilyIndex == rhs.queueFamilyIndex );
#endif
}
bool operator!=( CommandPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandPoolCreateInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags = {};
uint32_t queueFamilyIndex = {};
};
template <>
struct CppType<StructureType, StructureType::eCommandPoolCreateInfo>
{
using Type = CommandPoolCreateInfo;
};
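// Usage sketch (assuming a valid vk::Device `device` and a `graphicsQueueFamilyIndex`
// chosen during physical-device selection): a pool whose command buffers can be reset
// individually.
//   vk::CommandPoolCreateInfo poolInfo{ vk::CommandPoolCreateFlagBits::eResetCommandBuffer, graphicsQueueFamilyIndex };
//   vk::CommandPool commandPool = device.createCommandPool( poolInfo );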
// wrapper struct for struct VkSpecializationMapEntry, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSpecializationMapEntry.html
struct SpecializationMapEntry
{
using NativeType = VkSpecializationMapEntry;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SpecializationMapEntry(uint32_t constantID_ = {}, uint32_t offset_ = {}, size_t size_ = {}) VULKAN_HPP_NOEXCEPT
: constantID{ constantID_ }, offset{ offset_ }, size{ size_ }
{}
VULKAN_HPP_CONSTEXPR SpecializationMapEntry( SpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SpecializationMapEntry( VkSpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT
: SpecializationMapEntry( *reinterpret_cast<SpecializationMapEntry const *>( &rhs ) )
{}
SpecializationMapEntry & operator=( SpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SpecializationMapEntry & operator=( VkSpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SpecializationMapEntry const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 SpecializationMapEntry & setConstantID( uint32_t constantID_ ) VULKAN_HPP_NOEXCEPT
{
constantID = constantID_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SpecializationMapEntry & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT
{
offset = offset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SpecializationMapEntry & setSize( size_t size_ ) VULKAN_HPP_NOEXCEPT
{
size = size_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkSpecializationMapEntry const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSpecializationMapEntry*>( this );
}
operator VkSpecializationMapEntry &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSpecializationMapEntry*>( this );
}
operator VkSpecializationMapEntry const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSpecializationMapEntry*>( this );
}
operator VkSpecializationMapEntry *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSpecializationMapEntry*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<uint32_t const &, uint32_t const &, size_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( constantID, offset, size );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SpecializationMapEntry const & ) const = default;
#else
bool operator==( SpecializationMapEntry const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( constantID == rhs.constantID )
&& ( offset == rhs.offset )
&& ( size == rhs.size );
#endif
}
bool operator!=( SpecializationMapEntry const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
uint32_t constantID = {};
uint32_t offset = {};
size_t size = {};
};
// wrapper struct for struct VkSpecializationInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSpecializationInfo.html
struct SpecializationInfo
{
using NativeType = VkSpecializationInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SpecializationInfo(uint32_t mapEntryCount_ = {}, const VULKAN_HPP_NAMESPACE::SpecializationMapEntry * pMapEntries_ = {}, size_t dataSize_ = {}, const void * pData_ = {}) VULKAN_HPP_NOEXCEPT
: mapEntryCount{ mapEntryCount_ }, pMapEntries{ pMapEntries_ }, dataSize{ dataSize_ }, pData{ pData_ }
{}
VULKAN_HPP_CONSTEXPR SpecializationInfo( SpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SpecializationInfo( VkSpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: SpecializationInfo( *reinterpret_cast<SpecializationInfo const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
template <typename T>
SpecializationInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SpecializationMapEntry> const & mapEntries_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & data_ = {} )
: mapEntryCount( static_cast<uint32_t>( mapEntries_.size() ) ), pMapEntries( mapEntries_.data() ), dataSize( data_.size() * sizeof(T) ), pData( data_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
SpecializationInfo & operator=( SpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SpecializationInfo & operator=( VkSpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SpecializationInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 SpecializationInfo & setMapEntryCount( uint32_t mapEntryCount_ ) VULKAN_HPP_NOEXCEPT
{
mapEntryCount = mapEntryCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SpecializationInfo & setPMapEntries( const VULKAN_HPP_NAMESPACE::SpecializationMapEntry * pMapEntries_ ) VULKAN_HPP_NOEXCEPT
{
pMapEntries = pMapEntries_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
SpecializationInfo & setMapEntries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SpecializationMapEntry> const & mapEntries_ ) VULKAN_HPP_NOEXCEPT
{
mapEntryCount = static_cast<uint32_t>( mapEntries_.size() );
pMapEntries = mapEntries_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 SpecializationInfo & setDataSize( size_t dataSize_ ) VULKAN_HPP_NOEXCEPT
{
dataSize = dataSize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SpecializationInfo & setPData( const void * pData_ ) VULKAN_HPP_NOEXCEPT
{
pData = pData_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
template <typename T>
SpecializationInfo & setData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & data_ ) VULKAN_HPP_NOEXCEPT
{
dataSize = data_.size() * sizeof(T);
pData = data_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkSpecializationInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSpecializationInfo*>( this );
}
operator VkSpecializationInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSpecializationInfo*>( this );
}
operator VkSpecializationInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSpecializationInfo*>( this );
}
operator VkSpecializationInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSpecializationInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<uint32_t const &, const VULKAN_HPP_NAMESPACE::SpecializationMapEntry * const &, size_t const &, const void * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( mapEntryCount, pMapEntries, dataSize, pData );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SpecializationInfo const & ) const = default;
#else
bool operator==( SpecializationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( mapEntryCount == rhs.mapEntryCount )
&& ( pMapEntries == rhs.pMapEntries )
&& ( dataSize == rhs.dataSize )
&& ( pData == rhs.pData );
#endif
}
bool operator!=( SpecializationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
uint32_t mapEntryCount = {};
const VULKAN_HPP_NAMESPACE::SpecializationMapEntry * pMapEntries = {};
size_t dataSize = {};
const void * pData = {};
};
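// Usage sketch (constant IDs and values are illustrative): feeding two specialization
// constants, declared in the shader as constant_id 0 and 1, from one packed host struct;
// each map entry gives a constant's offset and size inside that struct.
//   struct SpecData { uint32_t workgroupSize; float scale; } const specData{ 64, 0.5f };
//   vk::SpecializationMapEntry const entries[] = { { 0, offsetof( SpecData, workgroupSize ), sizeof( uint32_t ) },
//                                                  { 1, offsetof( SpecData, scale ), sizeof( float ) } };
//   vk::SpecializationInfo specInfo{ 2, entries, sizeof( specData ), &specData };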
// wrapper struct for struct VkPipelineShaderStageCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineShaderStageCreateInfo.html
struct PipelineShaderStageCreateInfo
{
using NativeType = VkPipelineShaderStageCreateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineShaderStageCreateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateInfo(VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex, VULKAN_HPP_NAMESPACE::ShaderModule module_ = {}, const char * pName_ = {}, const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, stage{ stage_ }, module{ module_ }, pName{ pName_ }, pSpecializationInfo{ pSpecializationInfo_ }
{}
VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateInfo( PipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineShaderStageCreateInfo( VkPipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineShaderStageCreateInfo( *reinterpret_cast<PipelineShaderStageCreateInfo const *>( &rhs ) )
{}
PipelineShaderStageCreateInfo & operator=( PipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineShaderStageCreateInfo & operator=( VkPipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setStage( VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ ) VULKAN_HPP_NOEXCEPT
{
stage = stage_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setModule( VULKAN_HPP_NAMESPACE::ShaderModule module_ ) VULKAN_HPP_NOEXCEPT
{
module = module_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setPName( const char * pName_ ) VULKAN_HPP_NOEXCEPT
{
pName = pName_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setPSpecializationInfo( const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo_ ) VULKAN_HPP_NOEXCEPT
{
pSpecializationInfo = pSpecializationInfo_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPipelineShaderStageCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineShaderStageCreateInfo*>( this );
}
operator VkPipelineShaderStageCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineShaderStageCreateInfo*>( this );
}
operator VkPipelineShaderStageCreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineShaderStageCreateInfo*>( this );
}
operator VkPipelineShaderStageCreateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineShaderStageCreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags const &, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits const &, VULKAN_HPP_NAMESPACE::ShaderModule const &, const char * const &, const VULKAN_HPP_NAMESPACE::SpecializationInfo * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, stage, module, pName, pSpecializationInfo );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
std::strong_ordering operator<=>( PipelineShaderStageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) return cmp;
if ( auto cmp = stage <=> rhs.stage; cmp != 0 ) return cmp;
if ( auto cmp = module <=> rhs.module; cmp != 0 ) return cmp;
if ( pName != rhs.pName )  // identical pointers (including two null pNames) are treated as equal without calling strcmp
if ( auto cmp = strcmp( pName, rhs.pName ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
if ( auto cmp = pSpecializationInfo <=> rhs.pSpecializationInfo; cmp != 0 ) return cmp;
return std::strong_ordering::equivalent;
}
#endif
bool operator==( PipelineShaderStageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( stage == rhs.stage )
&& ( module == rhs.module )
&& ( ( pName == rhs.pName ) || ( strcmp( pName, rhs.pName ) == 0 ) )
&& ( pSpecializationInfo == rhs.pSpecializationInfo );
}
bool operator!=( PipelineShaderStageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageCreateInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags = {};
VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex;
VULKAN_HPP_NAMESPACE::ShaderModule module = {};
const char * pName = {};
const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo = {};
};
template <>
struct CppType<StructureType, StructureType::ePipelineShaderStageCreateInfo>
{
using Type = PipelineShaderStageCreateInfo;
};
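// Usage sketch (assuming `shaderModule` was created from compiled SPIR-V via
// device.createShaderModule): a compute stage whose entry point is "main"; pName must
// remain valid until the pipeline is created.
//   vk::PipelineShaderStageCreateInfo stageInfo{};
//   stageInfo.setStage( vk::ShaderStageFlagBits::eCompute )
//            .setModule( shaderModule )
//            .setPName( "main" );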
// wrapper struct for struct VkComputePipelineCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkComputePipelineCreateInfo.html
struct ComputePipelineCreateInfo
{
using NativeType = VkComputePipelineCreateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eComputePipelineCreateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ComputePipelineCreateInfo(VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo stage_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, stage{ stage_ }, layout{ layout_ }, basePipelineHandle{ basePipelineHandle_ }, basePipelineIndex{ basePipelineIndex_ }
{}
VULKAN_HPP_CONSTEXPR ComputePipelineCreateInfo( ComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ComputePipelineCreateInfo( VkComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: ComputePipelineCreateInfo( *reinterpret_cast<ComputePipelineCreateInfo const *>( &rhs ) )
{}
ComputePipelineCreateInfo & operator=( ComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ComputePipelineCreateInfo & operator=( VkComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setStage( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo const & stage_ ) VULKAN_HPP_NOEXCEPT
{
stage = stage_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
{
layout = layout_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
{
basePipelineHandle = basePipelineHandle_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT
{
basePipelineIndex = basePipelineIndex_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkComputePipelineCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkComputePipelineCreateInfo*>( this );
}
operator VkComputePipelineCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkComputePipelineCreateInfo*>( this );
}
operator VkComputePipelineCreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkComputePipelineCreateInfo*>( this );
}
operator VkComputePipelineCreateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkComputePipelineCreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineCreateFlags const &, VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo const &, VULKAN_HPP_NAMESPACE::PipelineLayout const &, VULKAN_HPP_NAMESPACE::Pipeline const &, int32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, stage, layout, basePipelineHandle, basePipelineIndex );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ComputePipelineCreateInfo const & ) const = default;
#else
bool operator==( ComputePipelineCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( stage == rhs.stage )
&& ( layout == rhs.layout )
&& ( basePipelineHandle == rhs.basePipelineHandle )
&& ( basePipelineIndex == rhs.basePipelineIndex );
#endif
}
bool operator!=( ComputePipelineCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eComputePipelineCreateInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {};
VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo stage = {};
VULKAN_HPP_NAMESPACE::PipelineLayout layout = {};
VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {};
int32_t basePipelineIndex = {};
};
template <>
struct CppType<StructureType, StructureType::eComputePipelineCreateInfo>
{
using Type = ComputePipelineCreateInfo;
};
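// Usage sketch (illustrative comment only, not part of the generated API; assumes the
// default "vk" namespace and pre-existing "device", "shaderModule" and "pipelineLayout"
// handles): populating a ComputePipelineCreateInfo through the fluent setters.
//
//   vk::ComputePipelineCreateInfo createInfo = vk::ComputePipelineCreateInfo{}
//     .setStage( vk::PipelineShaderStageCreateInfo{}
//                  .setStage( vk::ShaderStageFlagBits::eCompute )
//                  .setModule( shaderModule )
//                  .setPName( "main" ) )
//     .setLayout( pipelineLayout );
//   // createComputePipeline returns a ResultValue, since pipeline creation may report
//   // ePipelineCompileRequired in addition to eSuccess.
//   vk::Pipeline pipeline = device.createComputePipeline( nullptr, createInfo ).value;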
// wrapper struct for struct VkComputePipelineIndirectBufferInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkComputePipelineIndirectBufferInfoNV.html
struct ComputePipelineIndirectBufferInfoNV
{
using NativeType = VkComputePipelineIndirectBufferInfoNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eComputePipelineIndirectBufferInfoNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ComputePipelineIndirectBufferInfoNV(VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::DeviceAddress pipelineDeviceAddressCaptureReplay_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, deviceAddress{ deviceAddress_ }, size{ size_ }, pipelineDeviceAddressCaptureReplay{ pipelineDeviceAddressCaptureReplay_ }
{}
VULKAN_HPP_CONSTEXPR ComputePipelineIndirectBufferInfoNV( ComputePipelineIndirectBufferInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ComputePipelineIndirectBufferInfoNV( VkComputePipelineIndirectBufferInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: ComputePipelineIndirectBufferInfoNV( *reinterpret_cast<ComputePipelineIndirectBufferInfoNV const *>( &rhs ) )
{}
ComputePipelineIndirectBufferInfoNV & operator=( ComputePipelineIndirectBufferInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ComputePipelineIndirectBufferInfoNV & operator=( VkComputePipelineIndirectBufferInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ComputePipelineIndirectBufferInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ComputePipelineIndirectBufferInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ComputePipelineIndirectBufferInfoNV & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
{
deviceAddress = deviceAddress_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ComputePipelineIndirectBufferInfoNV & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
{
size = size_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ComputePipelineIndirectBufferInfoNV & setPipelineDeviceAddressCaptureReplay( VULKAN_HPP_NAMESPACE::DeviceAddress pipelineDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
{
pipelineDeviceAddressCaptureReplay = pipelineDeviceAddressCaptureReplay_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkComputePipelineIndirectBufferInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkComputePipelineIndirectBufferInfoNV*>( this );
}
operator VkComputePipelineIndirectBufferInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkComputePipelineIndirectBufferInfoNV*>( this );
}
operator VkComputePipelineIndirectBufferInfoNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkComputePipelineIndirectBufferInfoNV*>( this );
}
operator VkComputePipelineIndirectBufferInfoNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkComputePipelineIndirectBufferInfoNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, deviceAddress, size, pipelineDeviceAddressCaptureReplay );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ComputePipelineIndirectBufferInfoNV const & ) const = default;
#else
bool operator==( ComputePipelineIndirectBufferInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( deviceAddress == rhs.deviceAddress )
&& ( size == rhs.size )
&& ( pipelineDeviceAddressCaptureReplay == rhs.pipelineDeviceAddressCaptureReplay );
#endif
}
bool operator!=( ComputePipelineIndirectBufferInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eComputePipelineIndirectBufferInfoNV;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {};
VULKAN_HPP_NAMESPACE::DeviceSize size = {};
VULKAN_HPP_NAMESPACE::DeviceAddress pipelineDeviceAddressCaptureReplay = {};
};
template <>
struct CppType<StructureType, StructureType::eComputePipelineIndirectBufferInfoNV>
{
using Type = ComputePipelineIndirectBufferInfoNV;
};
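// Usage sketch (VK_NV_device_generated_commands_compute; "metadataAddress" and
// "metadataSize" are placeholders, typically obtained via
// Device::getPipelineIndirectMemoryRequirementsNV): this struct is chained into
// ComputePipelineCreateInfo::pNext so the driver stores pipeline metadata at the
// given device address.
//
//   vk::ComputePipelineIndirectBufferInfoNV indirectInfo = vk::ComputePipelineIndirectBufferInfoNV{}
//     .setDeviceAddress( metadataAddress )
//     .setSize( metadataSize );
//   computePipelineCreateInfo.setPNext( &indirectInfo );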
// wrapper struct for struct VkConditionalRenderingBeginInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkConditionalRenderingBeginInfoEXT.html
struct ConditionalRenderingBeginInfoEXT
{
using NativeType = VkConditionalRenderingBeginInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eConditionalRenderingBeginInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ConditionalRenderingBeginInfoEXT(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, buffer{ buffer_ }, offset{ offset_ }, flags{ flags_ }
{}
VULKAN_HPP_CONSTEXPR ConditionalRenderingBeginInfoEXT( ConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ConditionalRenderingBeginInfoEXT( VkConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: ConditionalRenderingBeginInfoEXT( *reinterpret_cast<ConditionalRenderingBeginInfoEXT const *>( &rhs ) )
{}
ConditionalRenderingBeginInfoEXT & operator=( ConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ConditionalRenderingBeginInfoEXT & operator=( VkConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ConditionalRenderingBeginInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ConditionalRenderingBeginInfoEXT & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
{
buffer = buffer_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ConditionalRenderingBeginInfoEXT & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
{
offset = offset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ConditionalRenderingBeginInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkConditionalRenderingBeginInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkConditionalRenderingBeginInfoEXT*>( this );
}
operator VkConditionalRenderingBeginInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkConditionalRenderingBeginInfoEXT*>( this );
}
operator VkConditionalRenderingBeginInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkConditionalRenderingBeginInfoEXT*>( this );
}
operator VkConditionalRenderingBeginInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkConditionalRenderingBeginInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Buffer const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, buffer, offset, flags );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ConditionalRenderingBeginInfoEXT const & ) const = default;
#else
bool operator==( ConditionalRenderingBeginInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( buffer == rhs.buffer )
&& ( offset == rhs.offset )
&& ( flags == rhs.flags );
#endif
}
bool operator!=( ConditionalRenderingBeginInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eConditionalRenderingBeginInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Buffer buffer = {};
VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags = {};
};
template <>
struct CppType<StructureType, StructureType::eConditionalRenderingBeginInfoEXT>
{
using Type = ConditionalRenderingBeginInfoEXT;
};
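// Usage sketch (VK_EXT_conditional_rendering, assuming the extension entry points are
// reachable through the default dispatcher; "predicateBuffer" is a placeholder):
// commands recorded between begin/end execute only if the 32-bit predicate at "offset"
// in the buffer is non-zero.
//
//   vk::ConditionalRenderingBeginInfoEXT beginInfo = vk::ConditionalRenderingBeginInfoEXT{}
//     .setBuffer( predicateBuffer )
//     .setOffset( 0 );
//   commandBuffer.beginConditionalRenderingEXT( beginInfo );
//   // ... conditionally executed draw / dispatch commands ...
//   commandBuffer.endConditionalRenderingEXT();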
// wrapper struct for struct VkConformanceVersion, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkConformanceVersion.html
struct ConformanceVersion
{
using NativeType = VkConformanceVersion;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ConformanceVersion(uint8_t major_ = {}, uint8_t minor_ = {}, uint8_t subminor_ = {}, uint8_t patch_ = {}) VULKAN_HPP_NOEXCEPT
: major{ major_ }, minor{ minor_ }, subminor{ subminor_ }, patch{ patch_ }
{}
VULKAN_HPP_CONSTEXPR ConformanceVersion( ConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ConformanceVersion( VkConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT
: ConformanceVersion( *reinterpret_cast<ConformanceVersion const *>( &rhs ) )
{}
ConformanceVersion & operator=( ConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ConformanceVersion & operator=( VkConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ConformanceVersion const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ConformanceVersion & setMajor( uint8_t major_ ) VULKAN_HPP_NOEXCEPT
{
major = major_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ConformanceVersion & setMinor( uint8_t minor_ ) VULKAN_HPP_NOEXCEPT
{
minor = minor_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ConformanceVersion & setSubminor( uint8_t subminor_ ) VULKAN_HPP_NOEXCEPT
{
subminor = subminor_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ConformanceVersion & setPatch( uint8_t patch_ ) VULKAN_HPP_NOEXCEPT
{
patch = patch_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkConformanceVersion const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkConformanceVersion*>( this );
}
operator VkConformanceVersion &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkConformanceVersion*>( this );
}
operator VkConformanceVersion const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkConformanceVersion*>( this );
}
operator VkConformanceVersion *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkConformanceVersion*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<uint8_t const &, uint8_t const &, uint8_t const &, uint8_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( major, minor, subminor, patch );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ConformanceVersion const & ) const = default;
#else
bool operator==( ConformanceVersion const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( major == rhs.major )
&& ( minor == rhs.minor )
&& ( subminor == rhs.subminor )
&& ( patch == rhs.patch );
#endif
}
bool operator!=( ConformanceVersion const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
uint8_t major = {};
uint8_t minor = {};
uint8_t subminor = {};
uint8_t patch = {};
};
using ConformanceVersionKHR = ConformanceVersion;
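// Usage sketch (core Vulkan 1.2 query; "physicalDevice" assumed): ConformanceVersion is
// reported through PhysicalDeviceDriverProperties and can be read via a structure chain.
//
//   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
//                                              vk::PhysicalDeviceDriverProperties>();
//   vk::ConformanceVersion cv = chain.get<vk::PhysicalDeviceDriverProperties>().conformanceVersion;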
// wrapper struct for struct VkConvertCooperativeVectorMatrixInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkConvertCooperativeVectorMatrixInfoNV.html
struct ConvertCooperativeVectorMatrixInfoNV
{
using NativeType = VkConvertCooperativeVectorMatrixInfoNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eConvertCooperativeVectorMatrixInfoNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV(size_t srcSize_ = {}, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR srcData_ = {}, size_t * pDstSize_ = {}, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dstData_ = {}, VULKAN_HPP_NAMESPACE::ComponentTypeKHR srcComponentType_ = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16, VULKAN_HPP_NAMESPACE::ComponentTypeKHR dstComponentType_ = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16, uint32_t numRows_ = {}, uint32_t numColumns_ = {}, VULKAN_HPP_NAMESPACE::CooperativeVectorMatrixLayoutNV srcLayout_ = VULKAN_HPP_NAMESPACE::CooperativeVectorMatrixLayoutNV::eRowMajor, size_t srcStride_ = {}, VULKAN_HPP_NAMESPACE::CooperativeVectorMatrixLayoutNV dstLayout_ = VULKAN_HPP_NAMESPACE::CooperativeVectorMatrixLayoutNV::eRowMajor, size_t dstStride_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, srcSize{ srcSize_ }, srcData{ srcData_ }, pDstSize{ pDstSize_ }, dstData{ dstData_ }, srcComponentType{ srcComponentType_ }, dstComponentType{ dstComponentType_ }, numRows{ numRows_ }, numColumns{ numColumns_ }, srcLayout{ srcLayout_ }, srcStride{ srcStride_ }, dstLayout{ dstLayout_ }, dstStride{ dstStride_ }
{}
VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV( ConvertCooperativeVectorMatrixInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ConvertCooperativeVectorMatrixInfoNV( VkConvertCooperativeVectorMatrixInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: ConvertCooperativeVectorMatrixInfoNV( *reinterpret_cast<ConvertCooperativeVectorMatrixInfoNV const *>( &rhs ) )
{}
ConvertCooperativeVectorMatrixInfoNV & operator=( ConvertCooperativeVectorMatrixInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ConvertCooperativeVectorMatrixInfoNV & operator=( VkConvertCooperativeVectorMatrixInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ConvertCooperativeVectorMatrixInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & setSrcSize( size_t srcSize_ ) VULKAN_HPP_NOEXCEPT
{
srcSize = srcSize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & setSrcData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & srcData_ ) VULKAN_HPP_NOEXCEPT
{
srcData = srcData_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & setPDstSize( size_t * pDstSize_ ) VULKAN_HPP_NOEXCEPT
{
pDstSize = pDstSize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & setDstData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & dstData_ ) VULKAN_HPP_NOEXCEPT
{
dstData = dstData_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & setSrcComponentType( VULKAN_HPP_NAMESPACE::ComponentTypeKHR srcComponentType_ ) VULKAN_HPP_NOEXCEPT
{
srcComponentType = srcComponentType_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & setDstComponentType( VULKAN_HPP_NAMESPACE::ComponentTypeKHR dstComponentType_ ) VULKAN_HPP_NOEXCEPT
{
dstComponentType = dstComponentType_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & setNumRows( uint32_t numRows_ ) VULKAN_HPP_NOEXCEPT
{
numRows = numRows_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & setNumColumns( uint32_t numColumns_ ) VULKAN_HPP_NOEXCEPT
{
numColumns = numColumns_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & setSrcLayout( VULKAN_HPP_NAMESPACE::CooperativeVectorMatrixLayoutNV srcLayout_ ) VULKAN_HPP_NOEXCEPT
{
srcLayout = srcLayout_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & setSrcStride( size_t srcStride_ ) VULKAN_HPP_NOEXCEPT
{
srcStride = srcStride_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & setDstLayout( VULKAN_HPP_NAMESPACE::CooperativeVectorMatrixLayoutNV dstLayout_ ) VULKAN_HPP_NOEXCEPT
{
dstLayout = dstLayout_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & setDstStride( size_t dstStride_ ) VULKAN_HPP_NOEXCEPT
{
dstStride = dstStride_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkConvertCooperativeVectorMatrixInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkConvertCooperativeVectorMatrixInfoNV*>( this );
}
operator VkConvertCooperativeVectorMatrixInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkConvertCooperativeVectorMatrixInfoNV*>( this );
}
operator VkConvertCooperativeVectorMatrixInfoNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkConvertCooperativeVectorMatrixInfoNV*>( this );
}
operator VkConvertCooperativeVectorMatrixInfoNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkConvertCooperativeVectorMatrixInfoNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, size_t const &, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const &, size_t * const &, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const &, VULKAN_HPP_NAMESPACE::ComponentTypeKHR const &, VULKAN_HPP_NAMESPACE::ComponentTypeKHR const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::CooperativeVectorMatrixLayoutNV const &, size_t const &, VULKAN_HPP_NAMESPACE::CooperativeVectorMatrixLayoutNV const &, size_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, srcSize, srcData, pDstSize, dstData, srcComponentType, dstComponentType, numRows, numColumns, srcLayout, srcStride, dstLayout, dstStride );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eConvertCooperativeVectorMatrixInfoNV;
const void * pNext = {};
size_t srcSize = {};
VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR srcData = {};
size_t * pDstSize = {};
VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dstData = {};
VULKAN_HPP_NAMESPACE::ComponentTypeKHR srcComponentType = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16;
VULKAN_HPP_NAMESPACE::ComponentTypeKHR dstComponentType = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16;
uint32_t numRows = {};
uint32_t numColumns = {};
VULKAN_HPP_NAMESPACE::CooperativeVectorMatrixLayoutNV srcLayout = VULKAN_HPP_NAMESPACE::CooperativeVectorMatrixLayoutNV::eRowMajor;
size_t srcStride = {};
VULKAN_HPP_NAMESPACE::CooperativeVectorMatrixLayoutNV dstLayout = VULKAN_HPP_NAMESPACE::CooperativeVectorMatrixLayoutNV::eRowMajor;
size_t dstStride = {};
};
template <>
struct CppType<StructureType, StructureType::eConvertCooperativeVectorMatrixInfoNV>
{
using Type = ConvertCooperativeVectorMatrixInfoNV;
};
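// Usage sketch (VK_NV_cooperative_vector; a rough, assumption-laden outline with error
// handling elided): conversion follows the usual two-call idiom — query the destination
// size while dstData holds a null host address, then allocate and convert. "device" and
// "srcBytes" are placeholders, and the layouts/dimensions are purely illustrative.
//
//   size_t dstSize = 0;
//   vk::ConvertCooperativeVectorMatrixInfoNV info = vk::ConvertCooperativeVectorMatrixInfoNV{}
//     .setSrcSize( srcBytes.size() )
//     .setSrcData( vk::DeviceOrHostAddressConstKHR{}.setHostAddress( srcBytes.data() ) )
//     .setPDstSize( &dstSize )
//     .setNumRows( 16 )
//     .setNumColumns( 16 )
//     .setSrcLayout( vk::CooperativeVectorMatrixLayoutNV::eRowMajor )
//     .setDstLayout( vk::CooperativeVectorMatrixLayoutNV::eInferencingOptimal );
//   device.convertCooperativeVectorMatrixNV( info );             // first call: size query
//   std::vector<uint8_t> dstBytes( dstSize );
//   info.setDstData( vk::DeviceOrHostAddressKHR{}.setHostAddress( dstBytes.data() ) );
//   device.convertCooperativeVectorMatrixNV( info );             // second call: conversion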
// wrapper struct for struct VkCooperativeMatrixFlexibleDimensionsPropertiesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCooperativeMatrixFlexibleDimensionsPropertiesNV.html
struct CooperativeMatrixFlexibleDimensionsPropertiesNV
{
using NativeType = VkCooperativeMatrixFlexibleDimensionsPropertiesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCooperativeMatrixFlexibleDimensionsPropertiesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR CooperativeMatrixFlexibleDimensionsPropertiesNV(uint32_t MGranularity_ = {}, uint32_t NGranularity_ = {}, uint32_t KGranularity_ = {}, VULKAN_HPP_NAMESPACE::ComponentTypeKHR AType_ = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16, VULKAN_HPP_NAMESPACE::ComponentTypeKHR BType_ = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16, VULKAN_HPP_NAMESPACE::ComponentTypeKHR CType_ = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16, VULKAN_HPP_NAMESPACE::ComponentTypeKHR ResultType_ = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16, VULKAN_HPP_NAMESPACE::Bool32 saturatingAccumulation_ = {}, VULKAN_HPP_NAMESPACE::ScopeKHR scope_ = VULKAN_HPP_NAMESPACE::ScopeKHR::eDevice, uint32_t workgroupInvocations_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, MGranularity{ MGranularity_ }, NGranularity{ NGranularity_ }, KGranularity{ KGranularity_ }, AType{ AType_ }, BType{ BType_ }, CType{ CType_ }, ResultType{ ResultType_ }, saturatingAccumulation{ saturatingAccumulation_ }, scope{ scope_ }, workgroupInvocations{ workgroupInvocations_ }
{}
VULKAN_HPP_CONSTEXPR CooperativeMatrixFlexibleDimensionsPropertiesNV( CooperativeMatrixFlexibleDimensionsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CooperativeMatrixFlexibleDimensionsPropertiesNV( VkCooperativeMatrixFlexibleDimensionsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: CooperativeMatrixFlexibleDimensionsPropertiesNV( *reinterpret_cast<CooperativeMatrixFlexibleDimensionsPropertiesNV const *>( &rhs ) )
{}
CooperativeMatrixFlexibleDimensionsPropertiesNV & operator=( CooperativeMatrixFlexibleDimensionsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
CooperativeMatrixFlexibleDimensionsPropertiesNV & operator=( VkCooperativeMatrixFlexibleDimensionsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CooperativeMatrixFlexibleDimensionsPropertiesNV const *>( &rhs );
return *this;
}
operator VkCooperativeMatrixFlexibleDimensionsPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCooperativeMatrixFlexibleDimensionsPropertiesNV*>( this );
}
operator VkCooperativeMatrixFlexibleDimensionsPropertiesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCooperativeMatrixFlexibleDimensionsPropertiesNV*>( this );
}
operator VkCooperativeMatrixFlexibleDimensionsPropertiesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkCooperativeMatrixFlexibleDimensionsPropertiesNV*>( this );
}
operator VkCooperativeMatrixFlexibleDimensionsPropertiesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkCooperativeMatrixFlexibleDimensionsPropertiesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ComponentTypeKHR const &, VULKAN_HPP_NAMESPACE::ComponentTypeKHR const &, VULKAN_HPP_NAMESPACE::ComponentTypeKHR const &, VULKAN_HPP_NAMESPACE::ComponentTypeKHR const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::ScopeKHR const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, MGranularity, NGranularity, KGranularity, AType, BType, CType, ResultType, saturatingAccumulation, scope, workgroupInvocations );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( CooperativeMatrixFlexibleDimensionsPropertiesNV const & ) const = default;
#else
bool operator==( CooperativeMatrixFlexibleDimensionsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( MGranularity == rhs.MGranularity )
&& ( NGranularity == rhs.NGranularity )
&& ( KGranularity == rhs.KGranularity )
&& ( AType == rhs.AType )
&& ( BType == rhs.BType )
&& ( CType == rhs.CType )
&& ( ResultType == rhs.ResultType )
&& ( saturatingAccumulation == rhs.saturatingAccumulation )
&& ( scope == rhs.scope )
&& ( workgroupInvocations == rhs.workgroupInvocations );
#endif
}
bool operator!=( CooperativeMatrixFlexibleDimensionsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCooperativeMatrixFlexibleDimensionsPropertiesNV;
void * pNext = {};
uint32_t MGranularity = {};
uint32_t NGranularity = {};
uint32_t KGranularity = {};
VULKAN_HPP_NAMESPACE::ComponentTypeKHR AType = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16;
VULKAN_HPP_NAMESPACE::ComponentTypeKHR BType = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16;
VULKAN_HPP_NAMESPACE::ComponentTypeKHR CType = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16;
VULKAN_HPP_NAMESPACE::ComponentTypeKHR ResultType = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16;
VULKAN_HPP_NAMESPACE::Bool32 saturatingAccumulation = {};
VULKAN_HPP_NAMESPACE::ScopeKHR scope = VULKAN_HPP_NAMESPACE::ScopeKHR::eDevice;
uint32_t workgroupInvocations = {};
};
template <>
struct CppType<StructureType, StructureType::eCooperativeMatrixFlexibleDimensionsPropertiesNV>
{
using Type = CooperativeMatrixFlexibleDimensionsPropertiesNV;
};
// wrapper struct for struct VkCooperativeMatrixPropertiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCooperativeMatrixPropertiesKHR.html
struct CooperativeMatrixPropertiesKHR
{
using NativeType = VkCooperativeMatrixPropertiesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCooperativeMatrixPropertiesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesKHR(uint32_t MSize_ = {}, uint32_t NSize_ = {}, uint32_t KSize_ = {}, VULKAN_HPP_NAMESPACE::ComponentTypeKHR AType_ = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16, VULKAN_HPP_NAMESPACE::ComponentTypeKHR BType_ = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16, VULKAN_HPP_NAMESPACE::ComponentTypeKHR CType_ = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16, VULKAN_HPP_NAMESPACE::ComponentTypeKHR ResultType_ = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16, VULKAN_HPP_NAMESPACE::Bool32 saturatingAccumulation_ = {}, VULKAN_HPP_NAMESPACE::ScopeKHR scope_ = VULKAN_HPP_NAMESPACE::ScopeKHR::eDevice, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, MSize{ MSize_ }, NSize{ NSize_ }, KSize{ KSize_ }, AType{ AType_ }, BType{ BType_ }, CType{ CType_ }, ResultType{ ResultType_ }, saturatingAccumulation{ saturatingAccumulation_ }, scope{ scope_ }
{}
VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesKHR( CooperativeMatrixPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CooperativeMatrixPropertiesKHR( VkCooperativeMatrixPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: CooperativeMatrixPropertiesKHR( *reinterpret_cast<CooperativeMatrixPropertiesKHR const *>( &rhs ) )
{}
CooperativeMatrixPropertiesKHR & operator=( CooperativeMatrixPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
CooperativeMatrixPropertiesKHR & operator=( VkCooperativeMatrixPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR const *>( &rhs );
return *this;
}
operator VkCooperativeMatrixPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCooperativeMatrixPropertiesKHR*>( this );
}
operator VkCooperativeMatrixPropertiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCooperativeMatrixPropertiesKHR*>( this );
}
operator VkCooperativeMatrixPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkCooperativeMatrixPropertiesKHR*>( this );
}
operator VkCooperativeMatrixPropertiesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkCooperativeMatrixPropertiesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ComponentTypeKHR const &, VULKAN_HPP_NAMESPACE::ComponentTypeKHR const &, VULKAN_HPP_NAMESPACE::ComponentTypeKHR const &, VULKAN_HPP_NAMESPACE::ComponentTypeKHR const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::ScopeKHR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, MSize, NSize, KSize, AType, BType, CType, ResultType, saturatingAccumulation, scope );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( CooperativeMatrixPropertiesKHR const & ) const = default;
#else
bool operator==( CooperativeMatrixPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( MSize == rhs.MSize )
&& ( NSize == rhs.NSize )
&& ( KSize == rhs.KSize )
&& ( AType == rhs.AType )
&& ( BType == rhs.BType )
&& ( CType == rhs.CType )
&& ( ResultType == rhs.ResultType )
&& ( saturatingAccumulation == rhs.saturatingAccumulation )
&& ( scope == rhs.scope );
#endif
}
bool operator!=( CooperativeMatrixPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCooperativeMatrixPropertiesKHR;
void * pNext = {};
uint32_t MSize = {};
uint32_t NSize = {};
uint32_t KSize = {};
VULKAN_HPP_NAMESPACE::ComponentTypeKHR AType = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16;
VULKAN_HPP_NAMESPACE::ComponentTypeKHR BType = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16;
VULKAN_HPP_NAMESPACE::ComponentTypeKHR CType = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16;
VULKAN_HPP_NAMESPACE::ComponentTypeKHR ResultType = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16;
VULKAN_HPP_NAMESPACE::Bool32 saturatingAccumulation = {};
VULKAN_HPP_NAMESPACE::ScopeKHR scope = VULKAN_HPP_NAMESPACE::ScopeKHR::eDevice;
};
template <>
struct CppType<StructureType, StructureType::eCooperativeMatrixPropertiesKHR>
{
using Type = CooperativeMatrixPropertiesKHR;
};
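// Usage sketch (VK_KHR_cooperative_matrix, enhanced mode with exceptions enabled;
// "physicalDevice" assumed): enumerate the supported M x N x K configurations. The NV
// property structs above and below are filled through the analogous NV enumeration
// entry points.
//
//   std::vector<vk::CooperativeMatrixPropertiesKHR> props =
//     physicalDevice.getCooperativeMatrixPropertiesKHR();
//   for ( vk::CooperativeMatrixPropertiesKHR const & p : props )
//   {
//     // e.g. pick a subgroup-scope configuration with float16 A-matrix inputs
//     if ( ( p.scope == vk::ScopeKHR::eSubgroup ) && ( p.AType == vk::ComponentTypeKHR::eFloat16 ) )
//       break;
//   }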
// wrapper struct for struct VkCooperativeMatrixPropertiesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCooperativeMatrixPropertiesNV.html
struct CooperativeMatrixPropertiesNV
{
using NativeType = VkCooperativeMatrixPropertiesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCooperativeMatrixPropertiesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesNV(uint32_t MSize_ = {}, uint32_t NSize_ = {}, uint32_t KSize_ = {}, VULKAN_HPP_NAMESPACE::ComponentTypeNV AType_ = {}, VULKAN_HPP_NAMESPACE::ComponentTypeNV BType_ = {}, VULKAN_HPP_NAMESPACE::ComponentTypeNV CType_ = {}, VULKAN_HPP_NAMESPACE::ComponentTypeNV DType_ = {}, VULKAN_HPP_NAMESPACE::ScopeNV scope_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, MSize{ MSize_ }, NSize{ NSize_ }, KSize{ KSize_ }, AType{ AType_ }, BType{ BType_ }, CType{ CType_ }, DType{ DType_ }, scope{ scope_ }
{}
VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesNV( CooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CooperativeMatrixPropertiesNV( VkCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: CooperativeMatrixPropertiesNV( *reinterpret_cast<CooperativeMatrixPropertiesNV const *>( &rhs ) )
{}
CooperativeMatrixPropertiesNV & operator=( CooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
CooperativeMatrixPropertiesNV & operator=( VkCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV const *>( &rhs );
return *this;
}
operator VkCooperativeMatrixPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCooperativeMatrixPropertiesNV*>( this );
}
operator VkCooperativeMatrixPropertiesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCooperativeMatrixPropertiesNV*>( this );
}
operator VkCooperativeMatrixPropertiesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkCooperativeMatrixPropertiesNV*>( this );
}
operator VkCooperativeMatrixPropertiesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkCooperativeMatrixPropertiesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ComponentTypeNV const &, VULKAN_HPP_NAMESPACE::ComponentTypeNV const &, VULKAN_HPP_NAMESPACE::ComponentTypeNV const &, VULKAN_HPP_NAMESPACE::ComponentTypeNV const &, VULKAN_HPP_NAMESPACE::ScopeNV const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, MSize, NSize, KSize, AType, BType, CType, DType, scope );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( CooperativeMatrixPropertiesNV const & ) const = default;
#else
bool operator==( CooperativeMatrixPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( MSize == rhs.MSize )
&& ( NSize == rhs.NSize )
&& ( KSize == rhs.KSize )
&& ( AType == rhs.AType )
&& ( BType == rhs.BType )
&& ( CType == rhs.CType )
&& ( DType == rhs.DType )
&& ( scope == rhs.scope );
#endif
}
bool operator!=( CooperativeMatrixPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCooperativeMatrixPropertiesNV;
void * pNext = {};
uint32_t MSize = {};
uint32_t NSize = {};
uint32_t KSize = {};
VULKAN_HPP_NAMESPACE::ComponentTypeNV AType = {};
VULKAN_HPP_NAMESPACE::ComponentTypeNV BType = {};
VULKAN_HPP_NAMESPACE::ComponentTypeNV CType = {};
VULKAN_HPP_NAMESPACE::ComponentTypeNV DType = {};
VULKAN_HPP_NAMESPACE::ScopeNV scope = {};
};
template <>
struct CppType<StructureType, StructureType::eCooperativeMatrixPropertiesNV>
{
using Type = CooperativeMatrixPropertiesNV;
};
// wrapper struct for struct VkCooperativeVectorPropertiesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCooperativeVectorPropertiesNV.html
struct CooperativeVectorPropertiesNV
{
using NativeType = VkCooperativeVectorPropertiesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCooperativeVectorPropertiesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR CooperativeVectorPropertiesNV(VULKAN_HPP_NAMESPACE::ComponentTypeKHR inputType_ = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16, VULKAN_HPP_NAMESPACE::ComponentTypeKHR inputInterpretation_ = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16, VULKAN_HPP_NAMESPACE::ComponentTypeKHR matrixInterpretation_ = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16, VULKAN_HPP_NAMESPACE::ComponentTypeKHR biasInterpretation_ = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16, VULKAN_HPP_NAMESPACE::ComponentTypeKHR resultType_ = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16, VULKAN_HPP_NAMESPACE::Bool32 transpose_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, inputType{ inputType_ }, inputInterpretation{ inputInterpretation_ }, matrixInterpretation{ matrixInterpretation_ }, biasInterpretation{ biasInterpretation_ }, resultType{ resultType_ }, transpose{ transpose_ }
{}
VULKAN_HPP_CONSTEXPR CooperativeVectorPropertiesNV( CooperativeVectorPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CooperativeVectorPropertiesNV( VkCooperativeVectorPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: CooperativeVectorPropertiesNV( *reinterpret_cast<CooperativeVectorPropertiesNV const *>( &rhs ) )
{}
CooperativeVectorPropertiesNV & operator=( CooperativeVectorPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
CooperativeVectorPropertiesNV & operator=( VkCooperativeVectorPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CooperativeVectorPropertiesNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 CooperativeVectorPropertiesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CooperativeVectorPropertiesNV & setInputType( VULKAN_HPP_NAMESPACE::ComponentTypeKHR inputType_ ) VULKAN_HPP_NOEXCEPT
{
inputType = inputType_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CooperativeVectorPropertiesNV & setInputInterpretation( VULKAN_HPP_NAMESPACE::ComponentTypeKHR inputInterpretation_ ) VULKAN_HPP_NOEXCEPT
{
inputInterpretation = inputInterpretation_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CooperativeVectorPropertiesNV & setMatrixInterpretation( VULKAN_HPP_NAMESPACE::ComponentTypeKHR matrixInterpretation_ ) VULKAN_HPP_NOEXCEPT
{
matrixInterpretation = matrixInterpretation_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CooperativeVectorPropertiesNV & setBiasInterpretation( VULKAN_HPP_NAMESPACE::ComponentTypeKHR biasInterpretation_ ) VULKAN_HPP_NOEXCEPT
{
biasInterpretation = biasInterpretation_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CooperativeVectorPropertiesNV & setResultType( VULKAN_HPP_NAMESPACE::ComponentTypeKHR resultType_ ) VULKAN_HPP_NOEXCEPT
{
resultType = resultType_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CooperativeVectorPropertiesNV & setTranspose( VULKAN_HPP_NAMESPACE::Bool32 transpose_ ) VULKAN_HPP_NOEXCEPT
{
transpose = transpose_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkCooperativeVectorPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCooperativeVectorPropertiesNV*>( this );
}
operator VkCooperativeVectorPropertiesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCooperativeVectorPropertiesNV*>( this );
}
operator VkCooperativeVectorPropertiesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkCooperativeVectorPropertiesNV*>( this );
}
operator VkCooperativeVectorPropertiesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkCooperativeVectorPropertiesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ComponentTypeKHR const &, VULKAN_HPP_NAMESPACE::ComponentTypeKHR const &, VULKAN_HPP_NAMESPACE::ComponentTypeKHR const &, VULKAN_HPP_NAMESPACE::ComponentTypeKHR const &, VULKAN_HPP_NAMESPACE::ComponentTypeKHR const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, inputType, inputInterpretation, matrixInterpretation, biasInterpretation, resultType, transpose );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( CooperativeVectorPropertiesNV const & ) const = default;
#else
bool operator==( CooperativeVectorPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( inputType == rhs.inputType )
&& ( inputInterpretation == rhs.inputInterpretation )
&& ( matrixInterpretation == rhs.matrixInterpretation )
&& ( biasInterpretation == rhs.biasInterpretation )
&& ( resultType == rhs.resultType )
&& ( transpose == rhs.transpose );
#endif
}
bool operator!=( CooperativeVectorPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCooperativeVectorPropertiesNV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::ComponentTypeKHR inputType = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16;
VULKAN_HPP_NAMESPACE::ComponentTypeKHR inputInterpretation = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16;
VULKAN_HPP_NAMESPACE::ComponentTypeKHR matrixInterpretation = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16;
VULKAN_HPP_NAMESPACE::ComponentTypeKHR biasInterpretation = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16;
VULKAN_HPP_NAMESPACE::ComponentTypeKHR resultType = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16;
VULKAN_HPP_NAMESPACE::Bool32 transpose = {};
};
template <>
struct CppType<StructureType, StructureType::eCooperativeVectorPropertiesNV>
{
using Type = CooperativeVectorPropertiesNV;
};
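// The matching enumeration for cooperative vectors
// (vkGetPhysicalDeviceCooperativeVectorPropertiesNV) follows the same pattern; a sketch
// under the same enhanced-mode assumptions as above:
//
//   std::vector<vk::CooperativeVectorPropertiesNV> vecProps =
//     physicalDevice.getCooperativeVectorPropertiesNV();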
// wrapper struct for struct VkCopyAccelerationStructureInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyAccelerationStructureInfoKHR.html
struct CopyAccelerationStructureInfoKHR
{
using NativeType = VkCopyAccelerationStructureInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyAccelerationStructureInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR CopyAccelerationStructureInfoKHR(VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ = {}, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, src{ src_ }, dst{ dst_ }, mode{ mode_ }
{}
VULKAN_HPP_CONSTEXPR CopyAccelerationStructureInfoKHR( CopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CopyAccelerationStructureInfoKHR( VkCopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: CopyAccelerationStructureInfoKHR( *reinterpret_cast<CopyAccelerationStructureInfoKHR const *>( &rhs ) )
{}
CopyAccelerationStructureInfoKHR & operator=( CopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
CopyAccelerationStructureInfoKHR & operator=( VkCopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureInfoKHR & setSrc( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ ) VULKAN_HPP_NOEXCEPT
{
src = src_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureInfoKHR & setDst( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ ) VULKAN_HPP_NOEXCEPT
{
dst = dst_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureInfoKHR & setMode( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT
{
mode = mode_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkCopyAccelerationStructureInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCopyAccelerationStructureInfoKHR*>( this );
}
operator VkCopyAccelerationStructureInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCopyAccelerationStructureInfoKHR*>( this );
}
operator VkCopyAccelerationStructureInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkCopyAccelerationStructureInfoKHR*>( this );
}
operator VkCopyAccelerationStructureInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkCopyAccelerationStructureInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR const &, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR const &, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, src, dst, mode );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( CopyAccelerationStructureInfoKHR const & ) const = default;
#else
bool operator==( CopyAccelerationStructureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( src == rhs.src )
&& ( dst == rhs.dst )
&& ( mode == rhs.mode );
#endif
}
bool operator!=( CopyAccelerationStructureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyAccelerationStructureInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src = {};
VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst = {};
VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone;
};
template <>
struct CppType<StructureType, StructureType::eCopyAccelerationStructureInfoKHR>
{
using Type = CopyAccelerationStructureInfoKHR;
};
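// Usage sketch (VK_KHR_acceleration_structure; "buildAS" and "compactAS" are placeholder
// handles): a typical use is compacting a freshly built acceleration structure into a
// smaller one, after querying the compacted size.
//
//   vk::CopyAccelerationStructureInfoKHR copyInfo = vk::CopyAccelerationStructureInfoKHR{}
//     .setSrc( buildAS )
//     .setDst( compactAS )
//     .setMode( vk::CopyAccelerationStructureModeKHR::eCompact );
//   commandBuffer.copyAccelerationStructureKHR( copyInfo );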
// wrapper struct for struct VkCopyAccelerationStructureToMemoryInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyAccelerationStructureToMemoryInfoKHR.html
struct CopyAccelerationStructureToMemoryInfoKHR
{
using NativeType = VkCopyAccelerationStructureToMemoryInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyAccelerationStructureToMemoryInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR(VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ = {}, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dst_ = {}, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, src{ src_ }, dst{ dst_ }, mode{ mode_ }
{}
VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR( CopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CopyAccelerationStructureToMemoryInfoKHR( VkCopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: CopyAccelerationStructureToMemoryInfoKHR( *reinterpret_cast<CopyAccelerationStructureToMemoryInfoKHR const *>( &rhs ) )
{}
CopyAccelerationStructureToMemoryInfoKHR & operator=( CopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
CopyAccelerationStructureToMemoryInfoKHR & operator=( VkCopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR & setSrc( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ ) VULKAN_HPP_NOEXCEPT
{
src = src_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR & setDst( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & dst_ ) VULKAN_HPP_NOEXCEPT
{
dst = dst_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR & setMode( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT
{
mode = mode_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkCopyAccelerationStructureToMemoryInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR*>( this );
}
operator VkCopyAccelerationStructureToMemoryInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCopyAccelerationStructureToMemoryInfoKHR*>( this );
}
operator VkCopyAccelerationStructureToMemoryInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR*>( this );
}
operator VkCopyAccelerationStructureToMemoryInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkCopyAccelerationStructureToMemoryInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR const &, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const &, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, src, dst, mode );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyAccelerationStructureToMemoryInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src = {};
VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dst = {};
VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone;
};
template <>
struct CppType<StructureType, StructureType::eCopyAccelerationStructureToMemoryInfoKHR>
{
using Type = CopyAccelerationStructureToMemoryInfoKHR;
};
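// Usage sketch (serialization to a device address; "accelerationStructure" and
// "serializationAddress" are placeholders): note that because dst is a
// DeviceOrHostAddressKHR union, this struct deliberately provides reflect() but no
// comparison operators.
//
//   vk::CopyAccelerationStructureToMemoryInfoKHR serializeInfo = vk::CopyAccelerationStructureToMemoryInfoKHR{}
//     .setSrc( accelerationStructure )
//     .setDst( vk::DeviceOrHostAddressKHR{}.setDeviceAddress( serializationAddress ) )
//     .setMode( vk::CopyAccelerationStructureModeKHR::eSerialize );
//   commandBuffer.copyAccelerationStructureToMemoryKHR( serializeInfo );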
// wrapper struct for struct VkCopyBufferInfo2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyBufferInfo2.html
struct CopyBufferInfo2
{
using NativeType = VkCopyBufferInfo2;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyBufferInfo2;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR CopyBufferInfo2(VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ = {}, VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ = {}, uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::BufferCopy2 * pRegions_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, srcBuffer{ srcBuffer_ }, dstBuffer{ dstBuffer_ }, regionCount{ regionCount_ }, pRegions{ pRegions_ }
{}
VULKAN_HPP_CONSTEXPR CopyBufferInfo2( CopyBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CopyBufferInfo2( VkCopyBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
: CopyBufferInfo2( *reinterpret_cast<CopyBufferInfo2 const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
CopyBufferInfo2( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_, VULKAN_HPP_NAMESPACE::Buffer dstBuffer_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferCopy2> const & regions_, const void * pNext_ = nullptr )
: pNext( pNext_ ), srcBuffer( srcBuffer_ ), dstBuffer( dstBuffer_ ), regionCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
CopyBufferInfo2 & operator=( CopyBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
CopyBufferInfo2 & operator=( VkCopyBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyBufferInfo2 const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 & setSrcBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ ) VULKAN_HPP_NOEXCEPT
{
srcBuffer = srcBuffer_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 & setDstBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ ) VULKAN_HPP_NOEXCEPT
{
dstBuffer = dstBuffer_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
{
regionCount = regionCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 & setPRegions( const VULKAN_HPP_NAMESPACE::BufferCopy2 * pRegions_ ) VULKAN_HPP_NOEXCEPT
{
pRegions = pRegions_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
CopyBufferInfo2 & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferCopy2> const & regions_ ) VULKAN_HPP_NOEXCEPT
{
regionCount = static_cast<uint32_t>( regions_.size() );
pRegions = regions_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkCopyBufferInfo2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCopyBufferInfo2*>( this );
}
operator VkCopyBufferInfo2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCopyBufferInfo2*>( this );
}
operator VkCopyBufferInfo2 const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkCopyBufferInfo2*>( this );
}
operator VkCopyBufferInfo2 *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkCopyBufferInfo2*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Buffer const &, VULKAN_HPP_NAMESPACE::Buffer const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::BufferCopy2 * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, srcBuffer, dstBuffer, regionCount, pRegions );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( CopyBufferInfo2 const & ) const = default;
#else
bool operator==( CopyBufferInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( srcBuffer == rhs.srcBuffer )
&& ( dstBuffer == rhs.dstBuffer )
&& ( regionCount == rhs.regionCount )
&& ( pRegions == rhs.pRegions );
#endif
}
bool operator!=( CopyBufferInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyBufferInfo2;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Buffer srcBuffer = {};
VULKAN_HPP_NAMESPACE::Buffer dstBuffer = {};
uint32_t regionCount = {};
const VULKAN_HPP_NAMESPACE::BufferCopy2 * pRegions = {};
};
template <>
struct CppType<StructureType, StructureType::eCopyBufferInfo2>
{
using Type = CopyBufferInfo2;
};
using CopyBufferInfo2KHR = CopyBufferInfo2;
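// Usage sketch (core Vulkan 1.3, enhanced mode; "staging", "deviceLocal" and "byteCount"
// assumed): setRegions wires regionCount and pRegions in one call from an
// ArrayProxyNoTemporaries, so the region must outlive the call as an lvalue.
//
//   vk::BufferCopy2 region = vk::BufferCopy2{}.setSrcOffset( 0 ).setDstOffset( 0 ).setSize( byteCount );
//   commandBuffer.copyBuffer2( vk::CopyBufferInfo2{}
//                                .setSrcBuffer( staging )
//                                .setDstBuffer( deviceLocal )
//                                .setRegions( region ) );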
// wrapper struct for struct VkCopyBufferToImageInfo2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyBufferToImageInfo2.html
struct CopyBufferToImageInfo2
{
using NativeType = VkCopyBufferToImageInfo2;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyBufferToImageInfo2;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR CopyBufferToImageInfo2(VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ = {}, VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, srcBuffer{ srcBuffer_ }, dstImage{ dstImage_ }, dstImageLayout{ dstImageLayout_ }, regionCount{ regionCount_ }, pRegions{ pRegions_ }
{}
VULKAN_HPP_CONSTEXPR CopyBufferToImageInfo2( CopyBufferToImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CopyBufferToImageInfo2( VkCopyBufferToImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
: CopyBufferToImageInfo2( *reinterpret_cast<CopyBufferToImageInfo2 const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
CopyBufferToImageInfo2( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_, VULKAN_HPP_NAMESPACE::Image dstImage_, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferImageCopy2> const & regions_, const void * pNext_ = nullptr )
: pNext( pNext_ ), srcBuffer( srcBuffer_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
CopyBufferToImageInfo2 & operator=( CopyBufferToImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
CopyBufferToImageInfo2 & operator=( VkCopyBufferToImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setSrcBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ ) VULKAN_HPP_NOEXCEPT
{
srcBuffer = srcBuffer_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT
{
dstImage = dstImage_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT
{
dstImageLayout = dstImageLayout_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
{
regionCount = regionCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setPRegions( const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions_ ) VULKAN_HPP_NOEXCEPT
{
pRegions = pRegions_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
CopyBufferToImageInfo2 & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferImageCopy2> const & regions_ ) VULKAN_HPP_NOEXCEPT
{
regionCount = static_cast<uint32_t>( regions_.size() );
pRegions = regions_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkCopyBufferToImageInfo2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCopyBufferToImageInfo2*>( this );
}
operator VkCopyBufferToImageInfo2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCopyBufferToImageInfo2*>( this );
}
operator VkCopyBufferToImageInfo2 const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkCopyBufferToImageInfo2*>( this );
}
operator VkCopyBufferToImageInfo2 *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkCopyBufferToImageInfo2*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Buffer const &, VULKAN_HPP_NAMESPACE::Image const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( CopyBufferToImageInfo2 const & ) const = default;
#else
bool operator==( CopyBufferToImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( srcBuffer == rhs.srcBuffer )
&& ( dstImage == rhs.dstImage )
&& ( dstImageLayout == rhs.dstImageLayout )
&& ( regionCount == rhs.regionCount )
&& ( pRegions == rhs.pRegions );
#endif
}
bool operator!=( CopyBufferToImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyBufferToImageInfo2;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Buffer srcBuffer = {};
VULKAN_HPP_NAMESPACE::Image dstImage = {};
VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
uint32_t regionCount = {};
const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions = {};
};
template <>
struct CppType<StructureType, StructureType::eCopyBufferToImageInfo2>
{
using Type = CopyBufferToImageInfo2;
};
using CopyBufferToImageInfo2KHR = CopyBufferToImageInfo2;
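  // Usage sketch (illustrative): with assumed handles `cmd`, `staging` (vk::Buffer)
  // and `image` (vk::Image) already transitioned to eTransferDstOptimal, and with
  // `width`/`height` assumed, an upload might be recorded as:
  //   vk::BufferImageCopy2 region{};   // bufferRowLength/bufferImageHeight of 0 = tightly packed
  //   region.setImageSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
  //         .setImageExtent( { width, height, 1 } );
  //   cmd.copyBufferToImage2(
  //     vk::CopyBufferToImageInfo2( staging, image, vk::ImageLayout::eTransferDstOptimal, 1, &region ) );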
// wrapper struct for struct VkCopyCommandTransformInfoQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyCommandTransformInfoQCOM.html
struct CopyCommandTransformInfoQCOM
{
using NativeType = VkCopyCommandTransformInfoQCOM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyCommandTransformInfoQCOM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR CopyCommandTransformInfoQCOM(VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, transform{ transform_ }
{}
VULKAN_HPP_CONSTEXPR CopyCommandTransformInfoQCOM( CopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CopyCommandTransformInfoQCOM( VkCopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
: CopyCommandTransformInfoQCOM( *reinterpret_cast<CopyCommandTransformInfoQCOM const *>( &rhs ) )
{}
CopyCommandTransformInfoQCOM & operator=( CopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
CopyCommandTransformInfoQCOM & operator=( VkCopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyCommandTransformInfoQCOM const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 CopyCommandTransformInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyCommandTransformInfoQCOM & setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT
{
transform = transform_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkCopyCommandTransformInfoQCOM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCopyCommandTransformInfoQCOM*>( this );
}
operator VkCopyCommandTransformInfoQCOM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCopyCommandTransformInfoQCOM*>( this );
}
operator VkCopyCommandTransformInfoQCOM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkCopyCommandTransformInfoQCOM*>( this );
}
operator VkCopyCommandTransformInfoQCOM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkCopyCommandTransformInfoQCOM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, transform );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( CopyCommandTransformInfoQCOM const & ) const = default;
#else
bool operator==( CopyCommandTransformInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( transform == rhs.transform );
#endif
}
bool operator!=( CopyCommandTransformInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyCommandTransformInfoQCOM;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
};
template <>
struct CppType<StructureType, StructureType::eCopyCommandTransformInfoQCOM>
{
using Type = CopyCommandTransformInfoQCOM;
};
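  // Usage sketch (illustrative; assumes VK_QCOM_rotated_copy_commands is enabled):
  // this struct is not passed to a command directly, but chained into the pNext of
  // a copy region such as vk::BufferImageCopy2 to rotate or mirror that region:
  //   vk::CopyCommandTransformInfoQCOM xform( vk::SurfaceTransformFlagBitsKHR::eRotate90 );
  //   vk::BufferImageCopy2 region{};
  //   region.setPNext( &xform );   // this region is copied with a 90-degree rotation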
// wrapper struct for struct VkCopyDescriptorSet, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyDescriptorSet.html
struct CopyDescriptorSet
{
using NativeType = VkCopyDescriptorSet;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyDescriptorSet;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR CopyDescriptorSet(VULKAN_HPP_NAMESPACE::DescriptorSet srcSet_ = {}, uint32_t srcBinding_ = {}, uint32_t srcArrayElement_ = {}, VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ = {}, uint32_t dstBinding_ = {}, uint32_t dstArrayElement_ = {}, uint32_t descriptorCount_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, srcSet{ srcSet_ }, srcBinding{ srcBinding_ }, srcArrayElement{ srcArrayElement_ }, dstSet{ dstSet_ }, dstBinding{ dstBinding_ }, dstArrayElement{ dstArrayElement_ }, descriptorCount{ descriptorCount_ }
{}
VULKAN_HPP_CONSTEXPR CopyDescriptorSet( CopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CopyDescriptorSet( VkCopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
: CopyDescriptorSet( *reinterpret_cast<CopyDescriptorSet const *>( &rhs ) )
{}
CopyDescriptorSet & operator=( CopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
CopyDescriptorSet & operator=( VkCopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyDescriptorSet const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setSrcSet( VULKAN_HPP_NAMESPACE::DescriptorSet srcSet_ ) VULKAN_HPP_NOEXCEPT
{
srcSet = srcSet_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setSrcBinding( uint32_t srcBinding_ ) VULKAN_HPP_NOEXCEPT
{
srcBinding = srcBinding_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setSrcArrayElement( uint32_t srcArrayElement_ ) VULKAN_HPP_NOEXCEPT
{
srcArrayElement = srcArrayElement_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setDstSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ ) VULKAN_HPP_NOEXCEPT
{
dstSet = dstSet_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT
{
dstBinding = dstBinding_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT
{
dstArrayElement = dstArrayElement_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
{
descriptorCount = descriptorCount_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkCopyDescriptorSet const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCopyDescriptorSet*>( this );
}
operator VkCopyDescriptorSet &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCopyDescriptorSet*>( this );
}
operator VkCopyDescriptorSet const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkCopyDescriptorSet*>( this );
}
operator VkCopyDescriptorSet *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkCopyDescriptorSet*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DescriptorSet const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DescriptorSet const &, uint32_t const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, srcSet, srcBinding, srcArrayElement, dstSet, dstBinding, dstArrayElement, descriptorCount );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( CopyDescriptorSet const & ) const = default;
#else
bool operator==( CopyDescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( srcSet == rhs.srcSet )
&& ( srcBinding == rhs.srcBinding )
&& ( srcArrayElement == rhs.srcArrayElement )
&& ( dstSet == rhs.dstSet )
&& ( dstBinding == rhs.dstBinding )
&& ( dstArrayElement == rhs.dstArrayElement )
&& ( descriptorCount == rhs.descriptorCount );
#endif
}
bool operator!=( CopyDescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyDescriptorSet;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DescriptorSet srcSet = {};
uint32_t srcBinding = {};
uint32_t srcArrayElement = {};
VULKAN_HPP_NAMESPACE::DescriptorSet dstSet = {};
uint32_t dstBinding = {};
uint32_t dstArrayElement = {};
uint32_t descriptorCount = {};
};
template <>
struct CppType<StructureType, StructureType::eCopyDescriptorSet>
{
using Type = CopyDescriptorSet;
};
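  // Usage sketch (illustrative): assuming a vk::Device `device` and two layout-
  // compatible descriptor sets `srcSet`/`dstSet`, copying one descriptor could
  // look like:
  //   vk::CopyDescriptorSet copy( srcSet, 0 /*srcBinding*/, 0 /*srcArrayElement*/,
  //                               dstSet, 0 /*dstBinding*/, 0 /*dstArrayElement*/,
  //                               1 /*descriptorCount*/ );
  //   device.updateDescriptorSets( {}, copy );   // no writes, one copy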
// wrapper struct for struct VkImageCopy2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageCopy2.html
struct ImageCopy2
{
using NativeType = VkImageCopy2;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageCopy2;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImageCopy2(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, srcSubresource{ srcSubresource_ }, srcOffset{ srcOffset_ }, dstSubresource{ dstSubresource_ }, dstOffset{ dstOffset_ }, extent{ extent_ }
{}
VULKAN_HPP_CONSTEXPR ImageCopy2( ImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageCopy2( VkImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageCopy2( *reinterpret_cast<ImageCopy2 const *>( &rhs ) )
{}
ImageCopy2 & operator=( ImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ImageCopy2 & operator=( VkImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageCopy2 const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
{
srcSubresource = srcSubresource_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT
{
srcOffset = srcOffset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
{
dstSubresource = dstSubresource_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT
{
dstOffset = dstOffset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
{
extent = extent_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkImageCopy2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageCopy2*>( this );
}
operator VkImageCopy2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageCopy2*>( this );
}
operator VkImageCopy2 const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkImageCopy2*>( this );
}
operator VkImageCopy2 *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkImageCopy2*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &, VULKAN_HPP_NAMESPACE::Offset3D const &, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &, VULKAN_HPP_NAMESPACE::Offset3D const &, VULKAN_HPP_NAMESPACE::Extent3D const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, srcSubresource, srcOffset, dstSubresource, dstOffset, extent );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImageCopy2 const & ) const = default;
#else
bool operator==( ImageCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( srcSubresource == rhs.srcSubresource )
&& ( srcOffset == rhs.srcOffset )
&& ( dstSubresource == rhs.dstSubresource )
&& ( dstOffset == rhs.dstOffset )
&& ( extent == rhs.extent );
#endif
}
bool operator!=( ImageCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageCopy2;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {};
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {};
VULKAN_HPP_NAMESPACE::Extent3D extent = {};
};
template <>
struct CppType<StructureType, StructureType::eImageCopy2>
{
using Type = ImageCopy2;
};
using ImageCopy2KHR = ImageCopy2;
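  // Usage sketch (illustrative): ImageCopy2 is the per-region element consumed by
  // CopyImageInfo2 below; a single-layer color region with assumed `width`/`height`
  // can be built with the chaining setters:
  //   vk::ImageCopy2 region{};
  //   region.setSrcSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
  //         .setDstSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
  //         .setExtent( { width, height, 1 } );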
// wrapper struct for struct VkCopyImageInfo2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyImageInfo2.html
struct CopyImageInfo2
{
using NativeType = VkCopyImageInfo2;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyImageInfo2;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR CopyImageInfo2(VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::ImageCopy2 * pRegions_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, srcImage{ srcImage_ }, srcImageLayout{ srcImageLayout_ }, dstImage{ dstImage_ }, dstImageLayout{ dstImageLayout_ }, regionCount{ regionCount_ }, pRegions{ pRegions_ }
{}
VULKAN_HPP_CONSTEXPR CopyImageInfo2( CopyImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CopyImageInfo2( VkCopyImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
: CopyImageInfo2( *reinterpret_cast<CopyImageInfo2 const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
CopyImageInfo2( VULKAN_HPP_NAMESPACE::Image srcImage_, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, VULKAN_HPP_NAMESPACE::Image dstImage_, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageCopy2> const & regions_, const void * pNext_ = nullptr )
: pNext( pNext_ ), srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
CopyImageInfo2 & operator=( CopyImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
CopyImageInfo2 & operator=( VkCopyImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyImageInfo2 const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT
{
srcImage = srcImage_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT
{
srcImageLayout = srcImageLayout_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT
{
dstImage = dstImage_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT
{
dstImageLayout = dstImageLayout_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
{
regionCount = regionCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setPRegions( const VULKAN_HPP_NAMESPACE::ImageCopy2 * pRegions_ ) VULKAN_HPP_NOEXCEPT
{
pRegions = pRegions_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
CopyImageInfo2 & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageCopy2> const & regions_ ) VULKAN_HPP_NOEXCEPT
{
regionCount = static_cast<uint32_t>( regions_.size() );
pRegions = regions_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkCopyImageInfo2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCopyImageInfo2*>( this );
}
operator VkCopyImageInfo2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCopyImageInfo2*>( this );
}
operator VkCopyImageInfo2 const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkCopyImageInfo2*>( this );
}
operator VkCopyImageInfo2 *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkCopyImageInfo2*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Image const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, VULKAN_HPP_NAMESPACE::Image const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::ImageCopy2 * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( CopyImageInfo2 const & ) const = default;
#else
bool operator==( CopyImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( srcImage == rhs.srcImage )
&& ( srcImageLayout == rhs.srcImageLayout )
&& ( dstImage == rhs.dstImage )
&& ( dstImageLayout == rhs.dstImageLayout )
&& ( regionCount == rhs.regionCount )
&& ( pRegions == rhs.pRegions );
#endif
}
bool operator!=( CopyImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyImageInfo2;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Image srcImage = {};
VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
VULKAN_HPP_NAMESPACE::Image dstImage = {};
VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
uint32_t regionCount = {};
const VULKAN_HPP_NAMESPACE::ImageCopy2 * pRegions = {};
};
template <>
struct CppType<StructureType, StructureType::eCopyImageInfo2>
{
using Type = CopyImageInfo2;
};
using CopyImageInfo2KHR = CopyImageInfo2;
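  // Usage sketch (illustrative): given an ImageCopy2 `region` as in the sketch
  // above and assumed images in transfer-src/transfer-dst layouts, the enhanced-mode
  // constructor deduces regionCount from the ArrayProxy (available unless
  // VULKAN_HPP_DISABLE_ENHANCED_MODE is defined):
  //   cmd.copyImage2( vk::CopyImageInfo2( srcImage, vk::ImageLayout::eTransferSrcOptimal,
  //                                       dstImage, vk::ImageLayout::eTransferDstOptimal,
  //                                       region ) );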
// wrapper struct for struct VkCopyImageToBufferInfo2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyImageToBufferInfo2.html
struct CopyImageToBufferInfo2
{
using NativeType = VkCopyImageToBufferInfo2;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyImageToBufferInfo2;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR CopyImageToBufferInfo2(VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ = {}, uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, srcImage{ srcImage_ }, srcImageLayout{ srcImageLayout_ }, dstBuffer{ dstBuffer_ }, regionCount{ regionCount_ }, pRegions{ pRegions_ }
{}
VULKAN_HPP_CONSTEXPR CopyImageToBufferInfo2( CopyImageToBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CopyImageToBufferInfo2( VkCopyImageToBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
: CopyImageToBufferInfo2( *reinterpret_cast<CopyImageToBufferInfo2 const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
CopyImageToBufferInfo2( VULKAN_HPP_NAMESPACE::Image srcImage_, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, VULKAN_HPP_NAMESPACE::Buffer dstBuffer_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferImageCopy2> const & regions_, const void * pNext_ = nullptr )
: pNext( pNext_ ), srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstBuffer( dstBuffer_ ), regionCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
CopyImageToBufferInfo2 & operator=( CopyImageToBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
CopyImageToBufferInfo2 & operator=( VkCopyImageToBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT
{
srcImage = srcImage_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT
{
srcImageLayout = srcImageLayout_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setDstBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ ) VULKAN_HPP_NOEXCEPT
{
dstBuffer = dstBuffer_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
{
regionCount = regionCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setPRegions( const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions_ ) VULKAN_HPP_NOEXCEPT
{
pRegions = pRegions_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
CopyImageToBufferInfo2 & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferImageCopy2> const & regions_ ) VULKAN_HPP_NOEXCEPT
{
regionCount = static_cast<uint32_t>( regions_.size() );
pRegions = regions_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkCopyImageToBufferInfo2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCopyImageToBufferInfo2*>( this );
}
operator VkCopyImageToBufferInfo2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCopyImageToBufferInfo2*>( this );
}
operator VkCopyImageToBufferInfo2 const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkCopyImageToBufferInfo2*>( this );
}
operator VkCopyImageToBufferInfo2 *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkCopyImageToBufferInfo2*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Image const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, VULKAN_HPP_NAMESPACE::Buffer const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( CopyImageToBufferInfo2 const & ) const = default;
#else
bool operator==( CopyImageToBufferInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( srcImage == rhs.srcImage )
&& ( srcImageLayout == rhs.srcImageLayout )
&& ( dstBuffer == rhs.dstBuffer )
&& ( regionCount == rhs.regionCount )
&& ( pRegions == rhs.pRegions );
#endif
}
bool operator!=( CopyImageToBufferInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyImageToBufferInfo2;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Image srcImage = {};
VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
VULKAN_HPP_NAMESPACE::Buffer dstBuffer = {};
uint32_t regionCount = {};
const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions = {};
};
template <>
struct CppType<StructureType, StructureType::eCopyImageToBufferInfo2>
{
using Type = CopyImageToBufferInfo2;
};
using CopyImageToBufferInfo2KHR = CopyImageToBufferInfo2;
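  // Usage sketch (illustrative): the mirror of the buffer-to-image upload, reading
  // an image back into an assumed `readback` buffer:
  //   vk::BufferImageCopy2 region{};
  //   region.setImageSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
  //         .setImageExtent( { width, height, 1 } );
  //   cmd.copyImageToBuffer2(
  //     vk::CopyImageToBufferInfo2( image, vk::ImageLayout::eTransferSrcOptimal, readback, 1, &region ) );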
// wrapper struct for struct VkCopyImageToImageInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyImageToImageInfo.html
struct CopyImageToImageInfo
{
using NativeType = VkCopyImageToImageInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyImageToImageInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR CopyImageToImageInfo(VULKAN_HPP_NAMESPACE::HostImageCopyFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::ImageCopy2 * pRegions_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, srcImage{ srcImage_ }, srcImageLayout{ srcImageLayout_ }, dstImage{ dstImage_ }, dstImageLayout{ dstImageLayout_ }, regionCount{ regionCount_ }, pRegions{ pRegions_ }
{}
VULKAN_HPP_CONSTEXPR CopyImageToImageInfo( CopyImageToImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CopyImageToImageInfo( VkCopyImageToImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: CopyImageToImageInfo( *reinterpret_cast<CopyImageToImageInfo const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
CopyImageToImageInfo( VULKAN_HPP_NAMESPACE::HostImageCopyFlags flags_, VULKAN_HPP_NAMESPACE::Image srcImage_, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, VULKAN_HPP_NAMESPACE::Image dstImage_, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageCopy2> const & regions_, const void * pNext_ = nullptr )
: pNext( pNext_ ), flags( flags_ ), srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
CopyImageToImageInfo & operator=( CopyImageToImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
CopyImageToImageInfo & operator=( VkCopyImageToImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyImageToImageInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfo & setFlags( VULKAN_HPP_NAMESPACE::HostImageCopyFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfo & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT
{
srcImage = srcImage_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfo & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT
{
srcImageLayout = srcImageLayout_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfo & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT
{
dstImage = dstImage_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfo & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT
{
dstImageLayout = dstImageLayout_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfo & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
{
regionCount = regionCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfo & setPRegions( const VULKAN_HPP_NAMESPACE::ImageCopy2 * pRegions_ ) VULKAN_HPP_NOEXCEPT
{
pRegions = pRegions_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
CopyImageToImageInfo & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageCopy2> const & regions_ ) VULKAN_HPP_NOEXCEPT
{
regionCount = static_cast<uint32_t>( regions_.size() );
pRegions = regions_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkCopyImageToImageInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCopyImageToImageInfo*>( this );
}
operator VkCopyImageToImageInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCopyImageToImageInfo*>( this );
}
operator VkCopyImageToImageInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkCopyImageToImageInfo*>( this );
}
operator VkCopyImageToImageInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkCopyImageToImageInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::HostImageCopyFlags const &, VULKAN_HPP_NAMESPACE::Image const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, VULKAN_HPP_NAMESPACE::Image const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::ImageCopy2 * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( CopyImageToImageInfo const & ) const = default;
#else
bool operator==( CopyImageToImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( srcImage == rhs.srcImage )
&& ( srcImageLayout == rhs.srcImageLayout )
&& ( dstImage == rhs.dstImage )
&& ( dstImageLayout == rhs.dstImageLayout )
&& ( regionCount == rhs.regionCount )
&& ( pRegions == rhs.pRegions );
#endif
}
bool operator!=( CopyImageToImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyImageToImageInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::HostImageCopyFlags flags = {};
VULKAN_HPP_NAMESPACE::Image srcImage = {};
VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
VULKAN_HPP_NAMESPACE::Image dstImage = {};
VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
uint32_t regionCount = {};
const VULKAN_HPP_NAMESPACE::ImageCopy2 * pRegions = {};
};
template <>
struct CppType<StructureType, StructureType::eCopyImageToImageInfo>
{
using Type = CopyImageToImageInfo;
};
using CopyImageToImageInfoEXT = CopyImageToImageInfo;
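  // Usage sketch (illustrative): this is the host-side image copy from
  // VK_EXT_host_image_copy (promoted in Vulkan 1.4); no command buffer is involved,
  // but it assumes the hostImageCopy feature and images created with host-transfer
  // usage:
  //   vk::ImageCopy2 region{};   // fill in subresources/extent as in the ImageCopy2 sketch
  //   device.copyImageToImage( vk::CopyImageToImageInfo( {}, srcImage, vk::ImageLayout::eGeneral,
  //                                                      dstImage, vk::ImageLayout::eGeneral,
  //                                                      1, &region ) );
  // (device.copyImageToImageEXT with the ...EXT alias above on pre-1.4 drivers.)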
// wrapper struct for struct VkImageToMemoryCopy, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageToMemoryCopy.html
struct ImageToMemoryCopy
{
using NativeType = VkImageToMemoryCopy;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageToMemoryCopy;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImageToMemoryCopy(void * pHostPointer_ = {}, uint32_t memoryRowLength_ = {}, uint32_t memoryImageHeight_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pHostPointer{ pHostPointer_ }, memoryRowLength{ memoryRowLength_ }, memoryImageHeight{ memoryImageHeight_ }, imageSubresource{ imageSubresource_ }, imageOffset{ imageOffset_ }, imageExtent{ imageExtent_ }
{}
VULKAN_HPP_CONSTEXPR ImageToMemoryCopy( ImageToMemoryCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageToMemoryCopy( VkImageToMemoryCopy const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageToMemoryCopy( *reinterpret_cast<ImageToMemoryCopy const *>( &rhs ) )
{}
ImageToMemoryCopy & operator=( ImageToMemoryCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ImageToMemoryCopy & operator=( VkImageToMemoryCopy const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageToMemoryCopy const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopy & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopy & setPHostPointer( void * pHostPointer_ ) VULKAN_HPP_NOEXCEPT
{
pHostPointer = pHostPointer_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopy & setMemoryRowLength( uint32_t memoryRowLength_ ) VULKAN_HPP_NOEXCEPT
{
memoryRowLength = memoryRowLength_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopy & setMemoryImageHeight( uint32_t memoryImageHeight_ ) VULKAN_HPP_NOEXCEPT
{
memoryImageHeight = memoryImageHeight_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopy & setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT
{
imageSubresource = imageSubresource_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopy & setImageOffset( VULKAN_HPP_NAMESPACE::Offset3D const & imageOffset_ ) VULKAN_HPP_NOEXCEPT
{
imageOffset = imageOffset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopy & setImageExtent( VULKAN_HPP_NAMESPACE::Extent3D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT
{
imageExtent = imageExtent_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkImageToMemoryCopy const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageToMemoryCopy*>( this );
}
operator VkImageToMemoryCopy &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageToMemoryCopy*>( this );
}
operator VkImageToMemoryCopy const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkImageToMemoryCopy*>( this );
}
operator VkImageToMemoryCopy *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkImageToMemoryCopy*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, void * const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &, VULKAN_HPP_NAMESPACE::Offset3D const &, VULKAN_HPP_NAMESPACE::Extent3D const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pHostPointer, memoryRowLength, memoryImageHeight, imageSubresource, imageOffset, imageExtent );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImageToMemoryCopy const & ) const = default;
#else
bool operator==( ImageToMemoryCopy const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pHostPointer == rhs.pHostPointer )
&& ( memoryRowLength == rhs.memoryRowLength )
&& ( memoryImageHeight == rhs.memoryImageHeight )
&& ( imageSubresource == rhs.imageSubresource )
&& ( imageOffset == rhs.imageOffset )
&& ( imageExtent == rhs.imageExtent );
#endif
}
bool operator!=( ImageToMemoryCopy const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageToMemoryCopy;
const void * pNext = {};
void * pHostPointer = {};
uint32_t memoryRowLength = {};
uint32_t memoryImageHeight = {};
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource = {};
VULKAN_HPP_NAMESPACE::Offset3D imageOffset = {};
VULKAN_HPP_NAMESPACE::Extent3D imageExtent = {};
};
template <>
struct CppType<StructureType, StructureType::eImageToMemoryCopy>
{
using Type = ImageToMemoryCopy;
};
using ImageToMemoryCopyEXT = ImageToMemoryCopy;
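  // Usage sketch (illustrative): the region targets plain host memory rather than a
  // vk::Buffer; memoryRowLength/memoryImageHeight of 0 mean tightly packed, as with
  // the buffer-image variants. With assumed `byteSize`/`width`/`height`:
  //   std::vector<unsigned char> pixels( byteSize );
  //   vk::ImageToMemoryCopy region{};
  //   region.setPHostPointer( pixels.data() )
  //         .setImageSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
  //         .setImageExtent( { width, height, 1 } );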
// wrapper struct for struct VkCopyImageToMemoryInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyImageToMemoryInfo.html
struct CopyImageToMemoryInfo
{
using NativeType = VkCopyImageToMemoryInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyImageToMemoryInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR CopyImageToMemoryInfo(VULKAN_HPP_NAMESPACE::HostImageCopyFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::ImageToMemoryCopy * pRegions_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, srcImage{ srcImage_ }, srcImageLayout{ srcImageLayout_ }, regionCount{ regionCount_ }, pRegions{ pRegions_ }
{}
VULKAN_HPP_CONSTEXPR CopyImageToMemoryInfo( CopyImageToMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CopyImageToMemoryInfo( VkCopyImageToMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: CopyImageToMemoryInfo( *reinterpret_cast<CopyImageToMemoryInfo const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
CopyImageToMemoryInfo( VULKAN_HPP_NAMESPACE::HostImageCopyFlags flags_, VULKAN_HPP_NAMESPACE::Image srcImage_, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageToMemoryCopy> const & regions_, const void * pNext_ = nullptr )
: pNext( pNext_ ), flags( flags_ ), srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), regionCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
CopyImageToMemoryInfo & operator=( CopyImageToMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
CopyImageToMemoryInfo & operator=( VkCopyImageToMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfo & setFlags( VULKAN_HPP_NAMESPACE::HostImageCopyFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfo & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT
{
srcImage = srcImage_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfo & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT
{
srcImageLayout = srcImageLayout_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfo & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
{
regionCount = regionCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfo & setPRegions( const VULKAN_HPP_NAMESPACE::ImageToMemoryCopy * pRegions_ ) VULKAN_HPP_NOEXCEPT
{
pRegions = pRegions_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
CopyImageToMemoryInfo & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageToMemoryCopy> const & regions_ ) VULKAN_HPP_NOEXCEPT
{
regionCount = static_cast<uint32_t>( regions_.size() );
pRegions = regions_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkCopyImageToMemoryInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCopyImageToMemoryInfo*>( this );
}
operator VkCopyImageToMemoryInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCopyImageToMemoryInfo*>( this );
}
operator VkCopyImageToMemoryInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkCopyImageToMemoryInfo*>( this );
}
operator VkCopyImageToMemoryInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkCopyImageToMemoryInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::HostImageCopyFlags const &, VULKAN_HPP_NAMESPACE::Image const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::ImageToMemoryCopy * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, srcImage, srcImageLayout, regionCount, pRegions );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( CopyImageToMemoryInfo const & ) const = default;
#else
bool operator==( CopyImageToMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( srcImage == rhs.srcImage )
&& ( srcImageLayout == rhs.srcImageLayout )
&& ( regionCount == rhs.regionCount )
&& ( pRegions == rhs.pRegions );
#endif
}
bool operator!=( CopyImageToMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyImageToMemoryInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::HostImageCopyFlags flags = {};
VULKAN_HPP_NAMESPACE::Image srcImage = {};
VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
uint32_t regionCount = {};
const VULKAN_HPP_NAMESPACE::ImageToMemoryCopy * pRegions = {};
};
template <>
struct CppType<StructureType, StructureType::eCopyImageToMemoryInfo>
{
using Type = CopyImageToMemoryInfo;
};
using CopyImageToMemoryInfoEXT = CopyImageToMemoryInfo;
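  // Usage sketch (illustrative): with the ImageToMemoryCopy `region` above and an
  // assumed image in eGeneral layout, the readback runs entirely on the host:
  //   device.copyImageToMemory(
  //     vk::CopyImageToMemoryInfo( {}, image, vk::ImageLayout::eGeneral, 1, &region ) );
  // (device.copyImageToMemoryEXT with the ...EXT alias above on pre-1.4 drivers.)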
// wrapper struct for struct VkCopyMemoryIndirectCommandNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyMemoryIndirectCommandNV.html
struct CopyMemoryIndirectCommandNV
{
using NativeType = VkCopyMemoryIndirectCommandNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR CopyMemoryIndirectCommandNV(VULKAN_HPP_NAMESPACE::DeviceAddress srcAddress_ = {}, VULKAN_HPP_NAMESPACE::DeviceAddress dstAddress_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}) VULKAN_HPP_NOEXCEPT
: srcAddress{ srcAddress_ }, dstAddress{ dstAddress_ }, size{ size_ }
{}
VULKAN_HPP_CONSTEXPR CopyMemoryIndirectCommandNV( CopyMemoryIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CopyMemoryIndirectCommandNV( VkCopyMemoryIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
: CopyMemoryIndirectCommandNV( *reinterpret_cast<CopyMemoryIndirectCommandNV const *>( &rhs ) )
{}
CopyMemoryIndirectCommandNV & operator=( CopyMemoryIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
CopyMemoryIndirectCommandNV & operator=( VkCopyMemoryIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyMemoryIndirectCommandNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 CopyMemoryIndirectCommandNV & setSrcAddress( VULKAN_HPP_NAMESPACE::DeviceAddress srcAddress_ ) VULKAN_HPP_NOEXCEPT
{
srcAddress = srcAddress_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyMemoryIndirectCommandNV & setDstAddress( VULKAN_HPP_NAMESPACE::DeviceAddress dstAddress_ ) VULKAN_HPP_NOEXCEPT
{
dstAddress = dstAddress_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyMemoryIndirectCommandNV & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
{
size = size_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkCopyMemoryIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCopyMemoryIndirectCommandNV*>( this );
}
operator VkCopyMemoryIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCopyMemoryIndirectCommandNV*>( this );
}
operator VkCopyMemoryIndirectCommandNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkCopyMemoryIndirectCommandNV*>( this );
}
operator VkCopyMemoryIndirectCommandNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkCopyMemoryIndirectCommandNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( srcAddress, dstAddress, size );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( CopyMemoryIndirectCommandNV const & ) const = default;
#else
bool operator==( CopyMemoryIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( srcAddress == rhs.srcAddress )
&& ( dstAddress == rhs.dstAddress )
&& ( size == rhs.size );
#endif
}
bool operator!=( CopyMemoryIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::DeviceAddress srcAddress = {};
VULKAN_HPP_NAMESPACE::DeviceAddress dstAddress = {};
VULKAN_HPP_NAMESPACE::DeviceSize size = {};
};
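  // Usage sketch (illustrative; assumes VK_NV_copy_memory_indirect is enabled):
  // the GPU reads an array of these commands from a device address at execution
  // time, so the host only seeds a mapped buffer (`mappedPtr`, `cmdBufferAddress`
  // and the source/destination addresses are assumed to exist):
  //   vk::CopyMemoryIndirectCommandNV copyCmd( srcAddr, dstAddr, 256 /*size in bytes*/ );
  //   std::memcpy( mappedPtr, &copyCmd, sizeof( copyCmd ) );
  //   cmd.copyMemoryIndirectNV( cmdBufferAddress, 1 /*copyCount*/,
  //                             sizeof( vk::CopyMemoryIndirectCommandNV ) /*stride*/ );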
// wrapper struct for struct VkCopyMemoryToAccelerationStructureInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyMemoryToAccelerationStructureInfoKHR.html
struct CopyMemoryToAccelerationStructureInfoKHR
{
using NativeType = VkCopyMemoryToAccelerationStructureInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyMemoryToAccelerationStructureInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR(VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR src_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ = {}, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, src{ src_ }, dst{ dst_ }, mode{ mode_ }
{}
VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR( CopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CopyMemoryToAccelerationStructureInfoKHR( VkCopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: CopyMemoryToAccelerationStructureInfoKHR( *reinterpret_cast<CopyMemoryToAccelerationStructureInfoKHR const *>( &rhs ) )
{}
CopyMemoryToAccelerationStructureInfoKHR & operator=( CopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
CopyMemoryToAccelerationStructureInfoKHR & operator=( VkCopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR & setSrc( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & src_ ) VULKAN_HPP_NOEXCEPT
{
src = src_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR & setDst( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ ) VULKAN_HPP_NOEXCEPT
{
dst = dst_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR & setMode( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT
{
mode = mode_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkCopyMemoryToAccelerationStructureInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR*>( this );
}
operator VkCopyMemoryToAccelerationStructureInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCopyMemoryToAccelerationStructureInfoKHR*>( this );
}
operator VkCopyMemoryToAccelerationStructureInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR*>( this );
}
operator VkCopyMemoryToAccelerationStructureInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkCopyMemoryToAccelerationStructureInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const &, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR const &, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, src, dst, mode );
}
#endif
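    // note: unlike its siblings, this struct gets no operator<=> / operator==; its
    // `src` member is the DeviceOrHostAddressConstKHR union, which cannot be
    // compared memberwise, so the generator omits the comparison block.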
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyMemoryToAccelerationStructureInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR src = {};
VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst = {};
VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone;
};
template <>
struct CppType<StructureType, StructureType::eCopyMemoryToAccelerationStructureInfoKHR>
{
using Type = CopyMemoryToAccelerationStructureInfoKHR;
};
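  // Usage sketch (illustrative; assumes VK_KHR_acceleration_structure): most often
  // used to deserialize a previously serialized acceleration structure, with `src`
  // pointing at valid serialized data and `accelerationStructure` assumed:
  //   vk::CopyMemoryToAccelerationStructureInfoKHR info{};
  //   info.setSrc( vk::DeviceOrHostAddressConstKHR( serializedDataAddress ) )
  //       .setDst( accelerationStructure )
  //       .setMode( vk::CopyAccelerationStructureModeKHR::eDeserialize );
  //   cmd.copyMemoryToAccelerationStructureKHR( info );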
// wrapper struct for struct VkCopyMemoryToImageIndirectCommandNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyMemoryToImageIndirectCommandNV.html
struct CopyMemoryToImageIndirectCommandNV
{
using NativeType = VkCopyMemoryToImageIndirectCommandNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR CopyMemoryToImageIndirectCommandNV(VULKAN_HPP_NAMESPACE::DeviceAddress srcAddress_ = {}, uint32_t bufferRowLength_ = {}, uint32_t bufferImageHeight_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {}) VULKAN_HPP_NOEXCEPT
: srcAddress{ srcAddress_ }, bufferRowLength{ bufferRowLength_ }, bufferImageHeight{ bufferImageHeight_ }, imageSubresource{ imageSubresource_ }, imageOffset{ imageOffset_ }, imageExtent{ imageExtent_ }
{}
VULKAN_HPP_CONSTEXPR CopyMemoryToImageIndirectCommandNV( CopyMemoryToImageIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CopyMemoryToImageIndirectCommandNV( VkCopyMemoryToImageIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
: CopyMemoryToImageIndirectCommandNV( *reinterpret_cast<CopyMemoryToImageIndirectCommandNV const *>( &rhs ) )
{}
CopyMemoryToImageIndirectCommandNV & operator=( CopyMemoryToImageIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
CopyMemoryToImageIndirectCommandNV & operator=( VkCopyMemoryToImageIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyMemoryToImageIndirectCommandNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectCommandNV & setSrcAddress( VULKAN_HPP_NAMESPACE::DeviceAddress srcAddress_ ) VULKAN_HPP_NOEXCEPT
{
srcAddress = srcAddress_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectCommandNV & setBufferRowLength( uint32_t bufferRowLength_ ) VULKAN_HPP_NOEXCEPT
{
bufferRowLength = bufferRowLength_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectCommandNV & setBufferImageHeight( uint32_t bufferImageHeight_ ) VULKAN_HPP_NOEXCEPT
{
bufferImageHeight = bufferImageHeight_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectCommandNV & setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT
{
imageSubresource = imageSubresource_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectCommandNV & setImageOffset( VULKAN_HPP_NAMESPACE::Offset3D const & imageOffset_ ) VULKAN_HPP_NOEXCEPT
{
imageOffset = imageOffset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectCommandNV & setImageExtent( VULKAN_HPP_NAMESPACE::Extent3D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT
{
imageExtent = imageExtent_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkCopyMemoryToImageIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCopyMemoryToImageIndirectCommandNV*>( this );
}
operator VkCopyMemoryToImageIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCopyMemoryToImageIndirectCommandNV*>( this );
}
operator VkCopyMemoryToImageIndirectCommandNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkCopyMemoryToImageIndirectCommandNV*>( this );
}
operator VkCopyMemoryToImageIndirectCommandNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkCopyMemoryToImageIndirectCommandNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::DeviceAddress const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &, VULKAN_HPP_NAMESPACE::Offset3D const &, VULKAN_HPP_NAMESPACE::Extent3D const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( srcAddress, bufferRowLength, bufferImageHeight, imageSubresource, imageOffset, imageExtent );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( CopyMemoryToImageIndirectCommandNV const & ) const = default;
#else
bool operator==( CopyMemoryToImageIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( srcAddress == rhs.srcAddress )
&& ( bufferRowLength == rhs.bufferRowLength )
&& ( bufferImageHeight == rhs.bufferImageHeight )
&& ( imageSubresource == rhs.imageSubresource )
&& ( imageOffset == rhs.imageOffset )
&& ( imageExtent == rhs.imageExtent );
#endif
}
bool operator!=( CopyMemoryToImageIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::DeviceAddress srcAddress = {};
uint32_t bufferRowLength = {};
uint32_t bufferImageHeight = {};
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource = {};
VULKAN_HPP_NAMESPACE::Offset3D imageOffset = {};
VULKAN_HPP_NAMESPACE::Extent3D imageExtent = {};
};
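  // Illustrative usage sketch (not part of the generated header): filling one indirect
  // copy record; records like this are written to a device-visible buffer whose address
  // is consumed by vk::CommandBuffer::copyMemoryToImageIndirectNV. `srcAddr` and
  // `extent` are assumed values.
  //   vk::CopyMemoryToImageIndirectCommandNV copyCmd{};
  //   copyCmd.setSrcAddress( srcAddr )
  //          .setBufferRowLength( 0 )    // 0 = rows tightly packed
  //          .setBufferImageHeight( 0 )  // 0 = slices tightly packed
  //          .setImageSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
  //          .setImageExtent( extent );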
// wrapper struct for struct VkMemoryToImageCopy, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryToImageCopy.html
struct MemoryToImageCopy
{
using NativeType = VkMemoryToImageCopy;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryToImageCopy;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR MemoryToImageCopy(const void * pHostPointer_ = {}, uint32_t memoryRowLength_ = {}, uint32_t memoryImageHeight_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pHostPointer{ pHostPointer_ }, memoryRowLength{ memoryRowLength_ }, memoryImageHeight{ memoryImageHeight_ }, imageSubresource{ imageSubresource_ }, imageOffset{ imageOffset_ }, imageExtent{ imageExtent_ }
{}
VULKAN_HPP_CONSTEXPR MemoryToImageCopy( MemoryToImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MemoryToImageCopy( VkMemoryToImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
: MemoryToImageCopy( *reinterpret_cast<MemoryToImageCopy const *>( &rhs ) )
{}
MemoryToImageCopy & operator=( MemoryToImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
MemoryToImageCopy & operator=( VkMemoryToImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryToImageCopy const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopy & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopy & setPHostPointer( const void * pHostPointer_ ) VULKAN_HPP_NOEXCEPT
{
pHostPointer = pHostPointer_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopy & setMemoryRowLength( uint32_t memoryRowLength_ ) VULKAN_HPP_NOEXCEPT
{
memoryRowLength = memoryRowLength_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopy & setMemoryImageHeight( uint32_t memoryImageHeight_ ) VULKAN_HPP_NOEXCEPT
{
memoryImageHeight = memoryImageHeight_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopy & setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT
{
imageSubresource = imageSubresource_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopy & setImageOffset( VULKAN_HPP_NAMESPACE::Offset3D const & imageOffset_ ) VULKAN_HPP_NOEXCEPT
{
imageOffset = imageOffset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopy & setImageExtent( VULKAN_HPP_NAMESPACE::Extent3D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT
{
imageExtent = imageExtent_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkMemoryToImageCopy const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMemoryToImageCopy*>( this );
}
operator VkMemoryToImageCopy &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMemoryToImageCopy*>( this );
}
operator VkMemoryToImageCopy const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkMemoryToImageCopy*>( this );
}
operator VkMemoryToImageCopy *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkMemoryToImageCopy*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const void * const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &, VULKAN_HPP_NAMESPACE::Offset3D const &, VULKAN_HPP_NAMESPACE::Extent3D const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pHostPointer, memoryRowLength, memoryImageHeight, imageSubresource, imageOffset, imageExtent );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( MemoryToImageCopy const & ) const = default;
#else
bool operator==( MemoryToImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pHostPointer == rhs.pHostPointer )
&& ( memoryRowLength == rhs.memoryRowLength )
&& ( memoryImageHeight == rhs.memoryImageHeight )
&& ( imageSubresource == rhs.imageSubresource )
&& ( imageOffset == rhs.imageOffset )
&& ( imageExtent == rhs.imageExtent );
#endif
}
bool operator!=( MemoryToImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryToImageCopy;
const void * pNext = {};
const void * pHostPointer = {};
uint32_t memoryRowLength = {};
uint32_t memoryImageHeight = {};
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource = {};
VULKAN_HPP_NAMESPACE::Offset3D imageOffset = {};
VULKAN_HPP_NAMESPACE::Extent3D imageExtent = {};
};
template <>
struct CppType<StructureType, StructureType::eMemoryToImageCopy>
{
using Type = MemoryToImageCopy;
};
using MemoryToImageCopyEXT = MemoryToImageCopy;
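  // Illustrative usage sketch (not part of the generated header): describing one region
  // of a host-memory-to-image copy; `pixels` (a pointer to tightly packed host data) and
  // `extent` are assumptions.
  //   vk::MemoryToImageCopy region{};
  //   region.setPHostPointer( pixels )
  //         .setMemoryRowLength( 0 )    // 0 = tightly packed
  //         .setMemoryImageHeight( 0 )
  //         .setImageSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
  //         .setImageExtent( extent );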
// wrapper struct for struct VkCopyMemoryToImageInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyMemoryToImageInfo.html
struct CopyMemoryToImageInfo
{
using NativeType = VkCopyMemoryToImageInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyMemoryToImageInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR CopyMemoryToImageInfo(VULKAN_HPP_NAMESPACE::HostImageCopyFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::MemoryToImageCopy * pRegions_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, dstImage{ dstImage_ }, dstImageLayout{ dstImageLayout_ }, regionCount{ regionCount_ }, pRegions{ pRegions_ }
{}
VULKAN_HPP_CONSTEXPR CopyMemoryToImageInfo( CopyMemoryToImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CopyMemoryToImageInfo( VkCopyMemoryToImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: CopyMemoryToImageInfo( *reinterpret_cast<CopyMemoryToImageInfo const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
CopyMemoryToImageInfo( VULKAN_HPP_NAMESPACE::HostImageCopyFlags flags_, VULKAN_HPP_NAMESPACE::Image dstImage_, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MemoryToImageCopy> const & regions_, const void * pNext_ = nullptr )
: pNext( pNext_ ), flags( flags_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
CopyMemoryToImageInfo & operator=( CopyMemoryToImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
CopyMemoryToImageInfo & operator=( VkCopyMemoryToImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfo & setFlags( VULKAN_HPP_NAMESPACE::HostImageCopyFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfo & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT
{
dstImage = dstImage_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfo & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT
{
dstImageLayout = dstImageLayout_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfo & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
{
regionCount = regionCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfo & setPRegions( const VULKAN_HPP_NAMESPACE::MemoryToImageCopy * pRegions_ ) VULKAN_HPP_NOEXCEPT
{
pRegions = pRegions_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
CopyMemoryToImageInfo & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MemoryToImageCopy> const & regions_ ) VULKAN_HPP_NOEXCEPT
{
regionCount = static_cast<uint32_t>( regions_.size() );
pRegions = regions_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkCopyMemoryToImageInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCopyMemoryToImageInfo*>( this );
}
operator VkCopyMemoryToImageInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCopyMemoryToImageInfo*>( this );
}
operator VkCopyMemoryToImageInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkCopyMemoryToImageInfo*>( this );
}
operator VkCopyMemoryToImageInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkCopyMemoryToImageInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::HostImageCopyFlags const &, VULKAN_HPP_NAMESPACE::Image const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::MemoryToImageCopy * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, dstImage, dstImageLayout, regionCount, pRegions );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( CopyMemoryToImageInfo const & ) const = default;
#else
bool operator==( CopyMemoryToImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( dstImage == rhs.dstImage )
&& ( dstImageLayout == rhs.dstImageLayout )
&& ( regionCount == rhs.regionCount )
&& ( pRegions == rhs.pRegions );
#endif
}
bool operator!=( CopyMemoryToImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyMemoryToImageInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::HostImageCopyFlags flags = {};
VULKAN_HPP_NAMESPACE::Image dstImage = {};
VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
uint32_t regionCount = {};
const VULKAN_HPP_NAMESPACE::MemoryToImageCopy * pRegions = {};
};
template <>
struct CppType<StructureType, StructureType::eCopyMemoryToImageInfo>
{
using Type = CopyMemoryToImageInfo;
};
using CopyMemoryToImageInfoEXT = CopyMemoryToImageInfo;
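  // Illustrative usage sketch (not part of the generated header): a host image copy via
  // vk::Device::copyMemoryToImage, reusing `region` from the MemoryToImageCopy sketch
  // above; `device` and `image` are assumed handles, and the image is assumed to be in a
  // layout valid for host copies (eGeneral here).
  //   vk::CopyMemoryToImageInfo copyInfo{};
  //   copyInfo.setDstImage( image )
  //           .setDstImageLayout( vk::ImageLayout::eGeneral )
  //           .setRegions( region );  // enhanced-mode setter fills regionCount and pRegions
  //   device.copyMemoryToImage( copyInfo );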
// wrapper struct for struct VkCopyMemoryToMicromapInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyMemoryToMicromapInfoEXT.html
struct CopyMemoryToMicromapInfoEXT
{
using NativeType = VkCopyMemoryToMicromapInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyMemoryToMicromapInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 CopyMemoryToMicromapInfoEXT(VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR src_ = {}, VULKAN_HPP_NAMESPACE::MicromapEXT dst_ = {}, VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode_ = VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT::eClone, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, src{ src_ }, dst{ dst_ }, mode{ mode_ }
{}
VULKAN_HPP_CONSTEXPR_14 CopyMemoryToMicromapInfoEXT( CopyMemoryToMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CopyMemoryToMicromapInfoEXT( VkCopyMemoryToMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: CopyMemoryToMicromapInfoEXT( *reinterpret_cast<CopyMemoryToMicromapInfoEXT const *>( &rhs ) )
{}
CopyMemoryToMicromapInfoEXT & operator=( CopyMemoryToMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
CopyMemoryToMicromapInfoEXT & operator=( VkCopyMemoryToMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 CopyMemoryToMicromapInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyMemoryToMicromapInfoEXT & setSrc( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & src_ ) VULKAN_HPP_NOEXCEPT
{
src = src_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyMemoryToMicromapInfoEXT & setDst( VULKAN_HPP_NAMESPACE::MicromapEXT dst_ ) VULKAN_HPP_NOEXCEPT
{
dst = dst_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyMemoryToMicromapInfoEXT & setMode( VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode_ ) VULKAN_HPP_NOEXCEPT
{
mode = mode_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkCopyMemoryToMicromapInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT*>( this );
}
operator VkCopyMemoryToMicromapInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCopyMemoryToMicromapInfoEXT*>( this );
}
operator VkCopyMemoryToMicromapInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT*>( this );
}
operator VkCopyMemoryToMicromapInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkCopyMemoryToMicromapInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const &, VULKAN_HPP_NAMESPACE::MicromapEXT const &, VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, src, dst, mode );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyMemoryToMicromapInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR src = {};
VULKAN_HPP_NAMESPACE::MicromapEXT dst = {};
VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode = VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT::eClone;
};
template <>
struct CppType<StructureType, StructureType::eCopyMemoryToMicromapInfoEXT>
{
using Type = CopyMemoryToMicromapInfoEXT;
};
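  // Illustrative usage sketch (not part of the generated header): deserializing host
  // memory into a micromap with the host command vk::Device::copyMemoryToMicromapEXT;
  // `device`, `deferredOp`, `serializedData`, and `micromap` are assumed handles/pointers.
  //   vk::CopyMemoryToMicromapInfoEXT copyInfo{};
  //   copyInfo.src.hostAddress = serializedData;
  //   copyInfo.setDst( micromap ).setMode( vk::CopyMicromapModeEXT::eDeserialize );
  //   vk::Result result = device.copyMemoryToMicromapEXT( deferredOp, copyInfo );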
// wrapper struct for struct VkCopyMicromapInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyMicromapInfoEXT.html
struct CopyMicromapInfoEXT
{
using NativeType = VkCopyMicromapInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyMicromapInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR CopyMicromapInfoEXT(VULKAN_HPP_NAMESPACE::MicromapEXT src_ = {}, VULKAN_HPP_NAMESPACE::MicromapEXT dst_ = {}, VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode_ = VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT::eClone, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, src{ src_ }, dst{ dst_ }, mode{ mode_ }
{}
VULKAN_HPP_CONSTEXPR CopyMicromapInfoEXT( CopyMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CopyMicromapInfoEXT( VkCopyMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: CopyMicromapInfoEXT( *reinterpret_cast<CopyMicromapInfoEXT const *>( &rhs ) )
{}
CopyMicromapInfoEXT & operator=( CopyMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
CopyMicromapInfoEXT & operator=( VkCopyMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 CopyMicromapInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyMicromapInfoEXT & setSrc( VULKAN_HPP_NAMESPACE::MicromapEXT src_ ) VULKAN_HPP_NOEXCEPT
{
src = src_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyMicromapInfoEXT & setDst( VULKAN_HPP_NAMESPACE::MicromapEXT dst_ ) VULKAN_HPP_NOEXCEPT
{
dst = dst_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyMicromapInfoEXT & setMode( VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode_ ) VULKAN_HPP_NOEXCEPT
{
mode = mode_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkCopyMicromapInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCopyMicromapInfoEXT*>( this );
}
operator VkCopyMicromapInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCopyMicromapInfoEXT*>( this );
}
operator VkCopyMicromapInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkCopyMicromapInfoEXT*>( this );
}
operator VkCopyMicromapInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkCopyMicromapInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::MicromapEXT const &, VULKAN_HPP_NAMESPACE::MicromapEXT const &, VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, src, dst, mode );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( CopyMicromapInfoEXT const & ) const = default;
#else
bool operator==( CopyMicromapInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( src == rhs.src )
&& ( dst == rhs.dst )
&& ( mode == rhs.mode );
#endif
}
bool operator!=( CopyMicromapInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyMicromapInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::MicromapEXT src = {};
VULKAN_HPP_NAMESPACE::MicromapEXT dst = {};
VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode = VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT::eClone;
};
template <>
struct CppType<StructureType, StructureType::eCopyMicromapInfoEXT>
{
using Type = CopyMicromapInfoEXT;
};
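  // Illustrative usage sketch (not part of the generated header): recording a
  // micromap-to-micromap copy with vk::CommandBuffer::copyMicromapEXT; `cmdBuffer`,
  // `srcMicromap`, and `dstMicromap` are assumed handles. For eCompact, the destination
  // would instead be sized from a compacted-size query.
  //   vk::CopyMicromapInfoEXT copyInfo( srcMicromap, dstMicromap, vk::CopyMicromapModeEXT::eClone );
  //   cmdBuffer.copyMicromapEXT( copyInfo );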
// wrapper struct for struct VkCopyMicromapToMemoryInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyMicromapToMemoryInfoEXT.html
struct CopyMicromapToMemoryInfoEXT
{
using NativeType = VkCopyMicromapToMemoryInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyMicromapToMemoryInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 CopyMicromapToMemoryInfoEXT(VULKAN_HPP_NAMESPACE::MicromapEXT src_ = {}, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dst_ = {}, VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode_ = VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT::eClone, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, src{ src_ }, dst{ dst_ }, mode{ mode_ }
{}
VULKAN_HPP_CONSTEXPR_14 CopyMicromapToMemoryInfoEXT( CopyMicromapToMemoryInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CopyMicromapToMemoryInfoEXT( VkCopyMicromapToMemoryInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: CopyMicromapToMemoryInfoEXT( *reinterpret_cast<CopyMicromapToMemoryInfoEXT const *>( &rhs ) )
{}
CopyMicromapToMemoryInfoEXT & operator=( CopyMicromapToMemoryInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
CopyMicromapToMemoryInfoEXT & operator=( VkCopyMicromapToMemoryInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 CopyMicromapToMemoryInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyMicromapToMemoryInfoEXT & setSrc( VULKAN_HPP_NAMESPACE::MicromapEXT src_ ) VULKAN_HPP_NOEXCEPT
{
src = src_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyMicromapToMemoryInfoEXT & setDst( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & dst_ ) VULKAN_HPP_NOEXCEPT
{
dst = dst_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyMicromapToMemoryInfoEXT & setMode( VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode_ ) VULKAN_HPP_NOEXCEPT
{
mode = mode_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkCopyMicromapToMemoryInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT*>( this );
}
operator VkCopyMicromapToMemoryInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCopyMicromapToMemoryInfoEXT*>( this );
}
operator VkCopyMicromapToMemoryInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT*>( this );
}
operator VkCopyMicromapToMemoryInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkCopyMicromapToMemoryInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::MicromapEXT const &, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const &, VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, src, dst, mode );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyMicromapToMemoryInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::MicromapEXT src = {};
VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dst = {};
VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode = VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT::eClone;
};
template <>
struct CppType<StructureType, StructureType::eCopyMicromapToMemoryInfoEXT>
{
using Type = CopyMicromapToMemoryInfoEXT;
};
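  // Illustrative usage sketch (not part of the generated header): serializing a micromap
  // into a device buffer with vk::CommandBuffer::copyMicromapToMemoryEXT; `cmdBuffer`,
  // `micromap`, and `dstAddr` (a suitably aligned vk::DeviceAddress) are assumptions.
  //   vk::CopyMicromapToMemoryInfoEXT copyInfo{};
  //   copyInfo.setSrc( micromap ).setMode( vk::CopyMicromapModeEXT::eSerialize );
  //   copyInfo.dst.deviceAddress = dstAddr;
  //   cmdBuffer.copyMicromapToMemoryEXT( copyInfo );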
// wrapper struct for struct VkTensorCopyARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTensorCopyARM.html
struct TensorCopyARM
{
using NativeType = VkTensorCopyARM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eTensorCopyARM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR TensorCopyARM(uint32_t dimensionCount_ = {}, const uint64_t * pSrcOffset_ = {}, const uint64_t * pDstOffset_ = {}, const uint64_t * pExtent_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, dimensionCount{ dimensionCount_ }, pSrcOffset{ pSrcOffset_ }, pDstOffset{ pDstOffset_ }, pExtent{ pExtent_ }
{}
VULKAN_HPP_CONSTEXPR TensorCopyARM( TensorCopyARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
TensorCopyARM( VkTensorCopyARM const & rhs ) VULKAN_HPP_NOEXCEPT
: TensorCopyARM( *reinterpret_cast<TensorCopyARM const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
TensorCopyARM( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & srcOffset_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & dstOffset_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & extent_ = {}, const void * pNext_ = nullptr )
: pNext( pNext_ ), dimensionCount( static_cast<uint32_t>( srcOffset_.size() ) ), pSrcOffset( srcOffset_.data() ), pDstOffset( dstOffset_.data() ), pExtent( extent_.data() )
{
#ifdef VULKAN_HPP_NO_EXCEPTIONS
VULKAN_HPP_ASSERT( srcOffset_.empty() || dstOffset_.empty() || ( srcOffset_.size() == dstOffset_.size() ) );
VULKAN_HPP_ASSERT( srcOffset_.empty() || extent_.empty() || ( srcOffset_.size() == extent_.size() ) );
VULKAN_HPP_ASSERT( dstOffset_.empty() || extent_.empty() || ( dstOffset_.size() == extent_.size() ) );
#else
      if ( !srcOffset_.empty() && !dstOffset_.empty() && ( srcOffset_.size() != dstOffset_.size() ) )
      {
        throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::TensorCopyARM::TensorCopyARM: !srcOffset_.empty() && !dstOffset_.empty() && ( srcOffset_.size() != dstOffset_.size() )" );
      }
      if ( !srcOffset_.empty() && !extent_.empty() && ( srcOffset_.size() != extent_.size() ) )
      {
        throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::TensorCopyARM::TensorCopyARM: !srcOffset_.empty() && !extent_.empty() && ( srcOffset_.size() != extent_.size() )" );
      }
      if ( !dstOffset_.empty() && !extent_.empty() && ( dstOffset_.size() != extent_.size() ) )
      {
        throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::TensorCopyARM::TensorCopyARM: !dstOffset_.empty() && !extent_.empty() && ( dstOffset_.size() != extent_.size() )" );
      }
#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
TensorCopyARM & operator=( TensorCopyARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
TensorCopyARM & operator=( VkTensorCopyARM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TensorCopyARM const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 TensorCopyARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 TensorCopyARM & setDimensionCount( uint32_t dimensionCount_ ) VULKAN_HPP_NOEXCEPT
{
dimensionCount = dimensionCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 TensorCopyARM & setPSrcOffset( const uint64_t * pSrcOffset_ ) VULKAN_HPP_NOEXCEPT
{
pSrcOffset = pSrcOffset_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
TensorCopyARM & setSrcOffset( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & srcOffset_ ) VULKAN_HPP_NOEXCEPT
{
dimensionCount = static_cast<uint32_t>( srcOffset_.size() );
pSrcOffset = srcOffset_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 TensorCopyARM & setPDstOffset( const uint64_t * pDstOffset_ ) VULKAN_HPP_NOEXCEPT
{
pDstOffset = pDstOffset_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
TensorCopyARM & setDstOffset( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & dstOffset_ ) VULKAN_HPP_NOEXCEPT
{
dimensionCount = static_cast<uint32_t>( dstOffset_.size() );
pDstOffset = dstOffset_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 TensorCopyARM & setPExtent( const uint64_t * pExtent_ ) VULKAN_HPP_NOEXCEPT
{
pExtent = pExtent_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
TensorCopyARM & setExtent( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & extent_ ) VULKAN_HPP_NOEXCEPT
{
dimensionCount = static_cast<uint32_t>( extent_.size() );
pExtent = extent_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkTensorCopyARM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkTensorCopyARM*>( this );
}
operator VkTensorCopyARM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkTensorCopyARM*>( this );
}
operator VkTensorCopyARM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkTensorCopyARM*>( this );
}
operator VkTensorCopyARM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkTensorCopyARM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const uint64_t * const &, const uint64_t * const &, const uint64_t * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, dimensionCount, pSrcOffset, pDstOffset, pExtent );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( TensorCopyARM const & ) const = default;
#else
bool operator==( TensorCopyARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( dimensionCount == rhs.dimensionCount )
&& ( pSrcOffset == rhs.pSrcOffset )
&& ( pDstOffset == rhs.pDstOffset )
&& ( pExtent == rhs.pExtent );
#endif
}
bool operator!=( TensorCopyARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTensorCopyARM;
const void * pNext = {};
uint32_t dimensionCount = {};
const uint64_t * pSrcOffset = {};
const uint64_t * pDstOffset = {};
const uint64_t * pExtent = {};
};
template <>
struct CppType<StructureType, StructureType::eTensorCopyARM>
{
using Type = TensorCopyARM;
};
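  // Illustrative usage sketch (not part of the generated header): describing one region
  // of a tensor copy. The enhanced-mode constructor derives dimensionCount from the
  // srcOffset span and checks that all three spans agree in size; the 3-dimensional
  // offsets and extent below are assumptions.
  //   std::array<uint64_t, 3> srcOffset{ 0, 0, 0 }, dstOffset{ 0, 0, 0 }, extent{ 2, 4, 8 };
  //   vk::TensorCopyARM region( srcOffset, dstOffset, extent );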
// wrapper struct for struct VkCopyTensorInfoARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyTensorInfoARM.html
struct CopyTensorInfoARM
{
using NativeType = VkCopyTensorInfoARM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyTensorInfoARM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR CopyTensorInfoARM(VULKAN_HPP_NAMESPACE::TensorARM srcTensor_ = {}, VULKAN_HPP_NAMESPACE::TensorARM dstTensor_ = {}, uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::TensorCopyARM * pRegions_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, srcTensor{ srcTensor_ }, dstTensor{ dstTensor_ }, regionCount{ regionCount_ }, pRegions{ pRegions_ }
{}
VULKAN_HPP_CONSTEXPR CopyTensorInfoARM( CopyTensorInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CopyTensorInfoARM( VkCopyTensorInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
: CopyTensorInfoARM( *reinterpret_cast<CopyTensorInfoARM const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
CopyTensorInfoARM( VULKAN_HPP_NAMESPACE::TensorARM srcTensor_, VULKAN_HPP_NAMESPACE::TensorARM dstTensor_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::TensorCopyARM> const & regions_, const void * pNext_ = nullptr )
: pNext( pNext_ ), srcTensor( srcTensor_ ), dstTensor( dstTensor_ ), regionCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
CopyTensorInfoARM & operator=( CopyTensorInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
CopyTensorInfoARM & operator=( VkCopyTensorInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyTensorInfoARM const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 CopyTensorInfoARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyTensorInfoARM & setSrcTensor( VULKAN_HPP_NAMESPACE::TensorARM srcTensor_ ) VULKAN_HPP_NOEXCEPT
{
srcTensor = srcTensor_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyTensorInfoARM & setDstTensor( VULKAN_HPP_NAMESPACE::TensorARM dstTensor_ ) VULKAN_HPP_NOEXCEPT
{
dstTensor = dstTensor_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyTensorInfoARM & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
{
regionCount = regionCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CopyTensorInfoARM & setPRegions( const VULKAN_HPP_NAMESPACE::TensorCopyARM * pRegions_ ) VULKAN_HPP_NOEXCEPT
{
pRegions = pRegions_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
CopyTensorInfoARM & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::TensorCopyARM> const & regions_ ) VULKAN_HPP_NOEXCEPT
{
regionCount = static_cast<uint32_t>( regions_.size() );
pRegions = regions_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkCopyTensorInfoARM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCopyTensorInfoARM*>( this );
}
operator VkCopyTensorInfoARM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCopyTensorInfoARM*>( this );
}
operator VkCopyTensorInfoARM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkCopyTensorInfoARM*>( this );
}
operator VkCopyTensorInfoARM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkCopyTensorInfoARM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::TensorARM const &, VULKAN_HPP_NAMESPACE::TensorARM const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::TensorCopyARM * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, srcTensor, dstTensor, regionCount, pRegions );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( CopyTensorInfoARM const & ) const = default;
#else
bool operator==( CopyTensorInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( srcTensor == rhs.srcTensor )
&& ( dstTensor == rhs.dstTensor )
&& ( regionCount == rhs.regionCount )
&& ( pRegions == rhs.pRegions );
#endif
}
bool operator!=( CopyTensorInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyTensorInfoARM;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::TensorARM srcTensor = {};
VULKAN_HPP_NAMESPACE::TensorARM dstTensor = {};
uint32_t regionCount = {};
const VULKAN_HPP_NAMESPACE::TensorCopyARM * pRegions = {};
};
template <>
struct CppType<StructureType, StructureType::eCopyTensorInfoARM>
{
using Type = CopyTensorInfoARM;
};
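  // Illustrative usage sketch (not part of the generated header): recording a tensor
  // copy with vk::CommandBuffer::copyTensorARM, reusing `region` from the TensorCopyARM
  // sketch above; `cmdBuffer`, `srcTensor`, and `dstTensor` are assumed handles.
  //   vk::CopyTensorInfoARM copyInfo( srcTensor, dstTensor, region );
  //   cmdBuffer.copyTensorARM( copyInfo );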
// wrapper struct for struct VkCuFunctionCreateInfoNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCuFunctionCreateInfoNVX.html
struct CuFunctionCreateInfoNVX
{
using NativeType = VkCuFunctionCreateInfoNVX;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCuFunctionCreateInfoNVX;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR CuFunctionCreateInfoNVX(VULKAN_HPP_NAMESPACE::CuModuleNVX module_ = {}, const char * pName_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, module{ module_ }, pName{ pName_ }
{}
VULKAN_HPP_CONSTEXPR CuFunctionCreateInfoNVX( CuFunctionCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CuFunctionCreateInfoNVX( VkCuFunctionCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
: CuFunctionCreateInfoNVX( *reinterpret_cast<CuFunctionCreateInfoNVX const *>( &rhs ) )
{}
CuFunctionCreateInfoNVX & operator=( CuFunctionCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
CuFunctionCreateInfoNVX & operator=( VkCuFunctionCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 CuFunctionCreateInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CuFunctionCreateInfoNVX & setModule( VULKAN_HPP_NAMESPACE::CuModuleNVX module_ ) VULKAN_HPP_NOEXCEPT
{
module = module_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CuFunctionCreateInfoNVX & setPName( const char * pName_ ) VULKAN_HPP_NOEXCEPT
{
pName = pName_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkCuFunctionCreateInfoNVX const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCuFunctionCreateInfoNVX*>( this );
}
operator VkCuFunctionCreateInfoNVX &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCuFunctionCreateInfoNVX*>( this );
}
operator VkCuFunctionCreateInfoNVX const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkCuFunctionCreateInfoNVX*>( this );
}
operator VkCuFunctionCreateInfoNVX *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkCuFunctionCreateInfoNVX*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::CuModuleNVX const &, const char * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, module, pName );
}
#endif
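    // Note for the comparison operators below: pName is compared by string content via
    // strcmp whenever the two pointers differ, so unequal pName values are assumed to
    // point at valid null-terminated strings.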
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
std::strong_ordering operator<=>( CuFunctionCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
if ( auto cmp = module <=> rhs.module; cmp != 0 ) return cmp;
if ( pName != rhs.pName )
if ( auto cmp = strcmp( pName, rhs.pName ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
return std::strong_ordering::equivalent;
}
#endif
bool operator==( CuFunctionCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( module == rhs.module )
&& ( ( pName == rhs.pName ) || ( strcmp( pName, rhs.pName ) == 0 ) );
}
bool operator!=( CuFunctionCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCuFunctionCreateInfoNVX;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::CuModuleNVX module = {};
const char * pName = {};
};
template <>
struct CppType<StructureType, StructureType::eCuFunctionCreateInfoNVX>
{
using Type = CuFunctionCreateInfoNVX;
};
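  // Illustrative usage sketch (not part of the generated header): looking up a kernel
  // entry point in a previously created CUDA module via vk::Device::createCuFunctionNVX;
  // `device` and `cuModule` are assumed handles, "saxpy" an assumed kernel name.
  //   vk::CuFunctionCreateInfoNVX createInfo( cuModule, "saxpy" );
  //   vk::CuFunctionNVX cuFunction = device.createCuFunctionNVX( createInfo );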
// wrapper struct for struct VkCuLaunchInfoNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCuLaunchInfoNVX.html
struct CuLaunchInfoNVX
{
using NativeType = VkCuLaunchInfoNVX;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCuLaunchInfoNVX;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR CuLaunchInfoNVX(VULKAN_HPP_NAMESPACE::CuFunctionNVX function_ = {}, uint32_t gridDimX_ = {}, uint32_t gridDimY_ = {}, uint32_t gridDimZ_ = {}, uint32_t blockDimX_ = {}, uint32_t blockDimY_ = {}, uint32_t blockDimZ_ = {}, uint32_t sharedMemBytes_ = {}, size_t paramCount_ = {}, const void * const * pParams_ = {}, size_t extraCount_ = {}, const void * const * pExtras_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, function{ function_ }, gridDimX{ gridDimX_ }, gridDimY{ gridDimY_ }, gridDimZ{ gridDimZ_ }, blockDimX{ blockDimX_ }, blockDimY{ blockDimY_ }, blockDimZ{ blockDimZ_ }, sharedMemBytes{ sharedMemBytes_ }, paramCount{ paramCount_ }, pParams{ pParams_ }, extraCount{ extraCount_ }, pExtras{ pExtras_ }
{}
VULKAN_HPP_CONSTEXPR CuLaunchInfoNVX( CuLaunchInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CuLaunchInfoNVX( VkCuLaunchInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
: CuLaunchInfoNVX( *reinterpret_cast<CuLaunchInfoNVX const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
CuLaunchInfoNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function_, uint32_t gridDimX_, uint32_t gridDimY_, uint32_t gridDimZ_, uint32_t blockDimX_, uint32_t blockDimY_, uint32_t blockDimZ_, uint32_t sharedMemBytes_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const void * const> const & params_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const void * const> const & extras_ = {}, const void * pNext_ = nullptr )
: pNext( pNext_ ), function( function_ ), gridDimX( gridDimX_ ), gridDimY( gridDimY_ ), gridDimZ( gridDimZ_ ), blockDimX( blockDimX_ ), blockDimY( blockDimY_ ), blockDimZ( blockDimZ_ ), sharedMemBytes( sharedMemBytes_ ), paramCount( params_.size() ), pParams( params_.data() ), extraCount( extras_.size() ), pExtras( extras_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
CuLaunchInfoNVX & operator=( CuLaunchInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
CuLaunchInfoNVX & operator=( VkCuLaunchInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setFunction( VULKAN_HPP_NAMESPACE::CuFunctionNVX function_ ) VULKAN_HPP_NOEXCEPT
{
function = function_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setGridDimX( uint32_t gridDimX_ ) VULKAN_HPP_NOEXCEPT
{
gridDimX = gridDimX_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setGridDimY( uint32_t gridDimY_ ) VULKAN_HPP_NOEXCEPT
{
gridDimY = gridDimY_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setGridDimZ( uint32_t gridDimZ_ ) VULKAN_HPP_NOEXCEPT
{
gridDimZ = gridDimZ_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setBlockDimX( uint32_t blockDimX_ ) VULKAN_HPP_NOEXCEPT
{
blockDimX = blockDimX_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setBlockDimY( uint32_t blockDimY_ ) VULKAN_HPP_NOEXCEPT
{
blockDimY = blockDimY_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setBlockDimZ( uint32_t blockDimZ_ ) VULKAN_HPP_NOEXCEPT
{
blockDimZ = blockDimZ_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setSharedMemBytes( uint32_t sharedMemBytes_ ) VULKAN_HPP_NOEXCEPT
{
sharedMemBytes = sharedMemBytes_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setParamCount( size_t paramCount_ ) VULKAN_HPP_NOEXCEPT
{
paramCount = paramCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setPParams( const void * const * pParams_ ) VULKAN_HPP_NOEXCEPT
{
pParams = pParams_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
CuLaunchInfoNVX & setParams( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const void * const> const & params_ ) VULKAN_HPP_NOEXCEPT
{
paramCount = params_.size();
pParams = params_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setExtraCount( size_t extraCount_ ) VULKAN_HPP_NOEXCEPT
{
extraCount = extraCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setPExtras( const void * const * pExtras_ ) VULKAN_HPP_NOEXCEPT
{
pExtras = pExtras_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
CuLaunchInfoNVX & setExtras( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const void * const> const & extras_ ) VULKAN_HPP_NOEXCEPT
{
extraCount = extras_.size();
pExtras = extras_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkCuLaunchInfoNVX const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCuLaunchInfoNVX*>( this );
}
operator VkCuLaunchInfoNVX &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCuLaunchInfoNVX*>( this );
}
operator VkCuLaunchInfoNVX const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkCuLaunchInfoNVX*>( this );
}
operator VkCuLaunchInfoNVX *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkCuLaunchInfoNVX*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::CuFunctionNVX const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, size_t const &, const void * const * const &, size_t const &, const void * const * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, function, gridDimX, gridDimY, gridDimZ, blockDimX, blockDimY, blockDimZ, sharedMemBytes, paramCount, pParams, extraCount, pExtras );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( CuLaunchInfoNVX const & ) const = default;
#else
bool operator==( CuLaunchInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( function == rhs.function )
&& ( gridDimX == rhs.gridDimX )
&& ( gridDimY == rhs.gridDimY )
&& ( gridDimZ == rhs.gridDimZ )
&& ( blockDimX == rhs.blockDimX )
&& ( blockDimY == rhs.blockDimY )
&& ( blockDimZ == rhs.blockDimZ )
&& ( sharedMemBytes == rhs.sharedMemBytes )
&& ( paramCount == rhs.paramCount )
&& ( pParams == rhs.pParams )
&& ( extraCount == rhs.extraCount )
&& ( pExtras == rhs.pExtras );
#endif
}
bool operator!=( CuLaunchInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCuLaunchInfoNVX;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::CuFunctionNVX function = {};
uint32_t gridDimX = {};
uint32_t gridDimY = {};
uint32_t gridDimZ = {};
uint32_t blockDimX = {};
uint32_t blockDimY = {};
uint32_t blockDimZ = {};
uint32_t sharedMemBytes = {};
size_t paramCount = {};
const void * const * pParams = {};
size_t extraCount = {};
const void * const * pExtras = {};
};
template <>
struct CppType<StructureType, StructureType::eCuLaunchInfoNVX>
{
using Type = CuLaunchInfoNVX;
};
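  // Illustrative usage sketch (not part of the generated header): dispatching a kernel
  // with vk::CommandBuffer::cuLaunchKernelNVX. As in the CUDA driver API's
  // cuLaunchKernel, parameters are passed as an array of untyped pointers; `cmdBuffer`,
  // `cuFunction`, `devPtr`, and `n` are assumptions.
  //   const void * params[] = { &devPtr, &n };
  //   vk::CuLaunchInfoNVX launchInfo{};
  //   launchInfo.setFunction( cuFunction )
  //             .setGridDimX( ( n + 255 ) / 256 ).setGridDimY( 1 ).setGridDimZ( 1 )
  //             .setBlockDimX( 256 ).setBlockDimY( 1 ).setBlockDimZ( 1 )
  //             .setParams( params );  // enhanced-mode setter fills paramCount and pParams
  //   cmdBuffer.cuLaunchKernelNVX( launchInfo );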
// wrapper struct for struct VkCuModuleCreateInfoNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCuModuleCreateInfoNVX.html
struct CuModuleCreateInfoNVX
{
using NativeType = VkCuModuleCreateInfoNVX;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCuModuleCreateInfoNVX;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR CuModuleCreateInfoNVX(size_t dataSize_ = {}, const void * pData_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, dataSize{ dataSize_ }, pData{ pData_ }
{}
VULKAN_HPP_CONSTEXPR CuModuleCreateInfoNVX( CuModuleCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CuModuleCreateInfoNVX( VkCuModuleCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
: CuModuleCreateInfoNVX( *reinterpret_cast<CuModuleCreateInfoNVX const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
template <typename T>
CuModuleCreateInfoNVX( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & data_, const void * pNext_ = nullptr )
: pNext( pNext_ ), dataSize( data_.size() * sizeof(T) ), pData( data_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
CuModuleCreateInfoNVX & operator=( CuModuleCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
CuModuleCreateInfoNVX & operator=( VkCuModuleCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 CuModuleCreateInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CuModuleCreateInfoNVX & setDataSize( size_t dataSize_ ) VULKAN_HPP_NOEXCEPT
{
dataSize = dataSize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CuModuleCreateInfoNVX & setPData( const void * pData_ ) VULKAN_HPP_NOEXCEPT
{
pData = pData_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
template <typename T>
CuModuleCreateInfoNVX & setData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & data_ ) VULKAN_HPP_NOEXCEPT
{
dataSize = data_.size() * sizeof(T);
pData = data_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkCuModuleCreateInfoNVX const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCuModuleCreateInfoNVX*>( this );
}
operator VkCuModuleCreateInfoNVX &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCuModuleCreateInfoNVX*>( this );
}
operator VkCuModuleCreateInfoNVX const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkCuModuleCreateInfoNVX*>( this );
}
operator VkCuModuleCreateInfoNVX *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkCuModuleCreateInfoNVX*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, size_t const &, const void * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, dataSize, pData );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( CuModuleCreateInfoNVX const & ) const = default;
#else
bool operator==( CuModuleCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( dataSize == rhs.dataSize )
&& ( pData == rhs.pData );
#endif
}
bool operator!=( CuModuleCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCuModuleCreateInfoNVX;
const void * pNext = {};
size_t dataSize = {};
const void * pData = {};
};
template <>
struct CppType<StructureType, StructureType::eCuModuleCreateInfoNVX>
{
using Type = CuModuleCreateInfoNVX;
};
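  // Illustrative usage sketch (not part of the generated header): creating a CUDA module
  // from a binary blob via vk::Device::createCuModuleNVX. The templated enhanced-mode
  // constructor computes dataSize as elementCount * sizeof(T); `device` and `moduleBlob`
  // (an assumed std::vector<uint8_t> holding the module binary) are assumptions.
  //   vk::CuModuleCreateInfoNVX createInfo( vk::ArrayProxyNoTemporaries<const uint8_t>( moduleBlob ) );
  //   vk::CuModuleNVX cuModule = device.createCuModuleNVX( createInfo );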
// wrapper struct for struct VkCuModuleTexturingModeCreateInfoNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCuModuleTexturingModeCreateInfoNVX.html
struct CuModuleTexturingModeCreateInfoNVX
{
using NativeType = VkCuModuleTexturingModeCreateInfoNVX;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCuModuleTexturingModeCreateInfoNVX;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR CuModuleTexturingModeCreateInfoNVX(VULKAN_HPP_NAMESPACE::Bool32 use64bitTexturing_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, use64bitTexturing{ use64bitTexturing_ }
{}
VULKAN_HPP_CONSTEXPR CuModuleTexturingModeCreateInfoNVX( CuModuleTexturingModeCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CuModuleTexturingModeCreateInfoNVX( VkCuModuleTexturingModeCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
: CuModuleTexturingModeCreateInfoNVX( *reinterpret_cast<CuModuleTexturingModeCreateInfoNVX const *>( &rhs ) )
{}
CuModuleTexturingModeCreateInfoNVX & operator=( CuModuleTexturingModeCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
CuModuleTexturingModeCreateInfoNVX & operator=( VkCuModuleTexturingModeCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CuModuleTexturingModeCreateInfoNVX const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 CuModuleTexturingModeCreateInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CuModuleTexturingModeCreateInfoNVX & setUse64bitTexturing( VULKAN_HPP_NAMESPACE::Bool32 use64bitTexturing_ ) VULKAN_HPP_NOEXCEPT
{
use64bitTexturing = use64bitTexturing_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkCuModuleTexturingModeCreateInfoNVX const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCuModuleTexturingModeCreateInfoNVX*>( this );
}
operator VkCuModuleTexturingModeCreateInfoNVX &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCuModuleTexturingModeCreateInfoNVX*>( this );
}
operator VkCuModuleTexturingModeCreateInfoNVX const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkCuModuleTexturingModeCreateInfoNVX*>( this );
}
operator VkCuModuleTexturingModeCreateInfoNVX *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkCuModuleTexturingModeCreateInfoNVX*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, use64bitTexturing );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( CuModuleTexturingModeCreateInfoNVX const & ) const = default;
#else
bool operator==( CuModuleTexturingModeCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( use64bitTexturing == rhs.use64bitTexturing );
#endif
}
bool operator!=( CuModuleTexturingModeCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCuModuleTexturingModeCreateInfoNVX;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 use64bitTexturing = {};
};
template <>
struct CppType<StructureType, StructureType::eCuModuleTexturingModeCreateInfoNVX>
{
using Type = CuModuleTexturingModeCreateInfoNVX;
};
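// Usage sketch (illustrative only, kept as a comment): CuModuleTexturingModeCreateInfoNVX
// extends CuModuleCreateInfoNVX through its pNext chain; a vk::StructureChain would express
// the same thing. VK_TRUE requests 64-bit texturing for the imported module; `blob` is the
// hypothetical byte vector from the previous sketch.
//   vk::CuModuleTexturingModeCreateInfoNVX texturingMode =
//     vk::CuModuleTexturingModeCreateInfoNVX{}.setUse64bitTexturing( VK_TRUE );
//   vk::CuModuleCreateInfoNVX moduleInfo = vk::CuModuleCreateInfoNVX{}
//                                            .setDataSize( blob.size() )
//                                            .setPData( blob.data() )
//                                            .setPNext( &texturingMode );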
#if defined( VK_ENABLE_BETA_EXTENSIONS )
// wrapper struct for struct VkCudaFunctionCreateInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCudaFunctionCreateInfoNV.html
struct CudaFunctionCreateInfoNV
{
using NativeType = VkCudaFunctionCreateInfoNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCudaFunctionCreateInfoNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR CudaFunctionCreateInfoNV(VULKAN_HPP_NAMESPACE::CudaModuleNV module_ = {}, const char * pName_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, module{ module_ }, pName{ pName_ }
{}
VULKAN_HPP_CONSTEXPR CudaFunctionCreateInfoNV( CudaFunctionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CudaFunctionCreateInfoNV( VkCudaFunctionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: CudaFunctionCreateInfoNV( *reinterpret_cast<CudaFunctionCreateInfoNV const *>( &rhs ) )
{}
CudaFunctionCreateInfoNV & operator=( CudaFunctionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
CudaFunctionCreateInfoNV & operator=( VkCudaFunctionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 CudaFunctionCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CudaFunctionCreateInfoNV & setModule( VULKAN_HPP_NAMESPACE::CudaModuleNV module_ ) VULKAN_HPP_NOEXCEPT
{
module = module_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CudaFunctionCreateInfoNV & setPName( const char * pName_ ) VULKAN_HPP_NOEXCEPT
{
pName = pName_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkCudaFunctionCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCudaFunctionCreateInfoNV*>( this );
}
operator VkCudaFunctionCreateInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCudaFunctionCreateInfoNV*>( this );
}
operator VkCudaFunctionCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkCudaFunctionCreateInfoNV*>( this );
}
operator VkCudaFunctionCreateInfoNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkCudaFunctionCreateInfoNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::CudaModuleNV const &, const char * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, module, pName );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
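// Note: this operator is spelled out rather than defaulted so that pName compares by string
// content via strcmp instead of by pointer identity; the remaining members compare member-wise.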
std::strong_ordering operator<=>( CudaFunctionCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
if ( auto cmp = module <=> rhs.module; cmp != 0 ) return cmp;
if ( pName != rhs.pName )
if ( auto cmp = strcmp( pName, rhs.pName ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
return std::strong_ordering::equivalent;
}
#endif
bool operator==( CudaFunctionCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( module == rhs.module )
&& ( ( pName == rhs.pName ) || ( strcmp( pName, rhs.pName ) == 0 ) );
}
bool operator!=( CudaFunctionCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCudaFunctionCreateInfoNV;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::CudaModuleNV module = {};
const char * pName = {};
};
template <>
struct CppType<StructureType, StructureType::eCudaFunctionCreateInfoNV>
{
using Type = CudaFunctionCreateInfoNV;
};
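// Usage sketch (illustrative only, kept as a comment): resolves a kernel entry point inside a
// previously created vk::CudaModuleNV (provisional VK_NV_cuda_kernel_launch). `device` and
// `cudaModule` are assumed; the latter would come from vk::Device::createCudaModuleNV.
//   vk::CudaFunctionCreateInfoNV functionInfo = vk::CudaFunctionCreateInfoNV{}
//                                                 .setModule( cudaModule )
//                                                 .setPName( "myKernel" );
//   vk::CudaFunctionNV kernel = device.createCudaFunctionNV( functionInfo );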
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
// wrapper struct for struct VkCudaLaunchInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCudaLaunchInfoNV.html
struct CudaLaunchInfoNV
{
using NativeType = VkCudaLaunchInfoNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCudaLaunchInfoNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR CudaLaunchInfoNV(VULKAN_HPP_NAMESPACE::CudaFunctionNV function_ = {}, uint32_t gridDimX_ = {}, uint32_t gridDimY_ = {}, uint32_t gridDimZ_ = {}, uint32_t blockDimX_ = {}, uint32_t blockDimY_ = {}, uint32_t blockDimZ_ = {}, uint32_t sharedMemBytes_ = {}, size_t paramCount_ = {}, const void * const * pParams_ = {}, size_t extraCount_ = {}, const void * const * pExtras_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, function{ function_ }, gridDimX{ gridDimX_ }, gridDimY{ gridDimY_ }, gridDimZ{ gridDimZ_ }, blockDimX{ blockDimX_ }, blockDimY{ blockDimY_ }, blockDimZ{ blockDimZ_ }, sharedMemBytes{ sharedMemBytes_ }, paramCount{ paramCount_ }, pParams{ pParams_ }, extraCount{ extraCount_ }, pExtras{ pExtras_ }
{}
VULKAN_HPP_CONSTEXPR CudaLaunchInfoNV( CudaLaunchInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CudaLaunchInfoNV( VkCudaLaunchInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: CudaLaunchInfoNV( *reinterpret_cast<CudaLaunchInfoNV const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
CudaLaunchInfoNV( VULKAN_HPP_NAMESPACE::CudaFunctionNV function_, uint32_t gridDimX_, uint32_t gridDimY_, uint32_t gridDimZ_, uint32_t blockDimX_, uint32_t blockDimY_, uint32_t blockDimZ_, uint32_t sharedMemBytes_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const void * const> const & params_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const void * const> const & extras_ = {}, const void * pNext_ = nullptr )
: pNext( pNext_ ), function( function_ ), gridDimX( gridDimX_ ), gridDimY( gridDimY_ ), gridDimZ( gridDimZ_ ), blockDimX( blockDimX_ ), blockDimY( blockDimY_ ), blockDimZ( blockDimZ_ ), sharedMemBytes( sharedMemBytes_ ), paramCount( params_.size() ), pParams( params_.data() ), extraCount( extras_.size() ), pExtras( extras_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
CudaLaunchInfoNV & operator=( CudaLaunchInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
CudaLaunchInfoNV & operator=( VkCudaLaunchInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setFunction( VULKAN_HPP_NAMESPACE::CudaFunctionNV function_ ) VULKAN_HPP_NOEXCEPT
{
function = function_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setGridDimX( uint32_t gridDimX_ ) VULKAN_HPP_NOEXCEPT
{
gridDimX = gridDimX_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setGridDimY( uint32_t gridDimY_ ) VULKAN_HPP_NOEXCEPT
{
gridDimY = gridDimY_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setGridDimZ( uint32_t gridDimZ_ ) VULKAN_HPP_NOEXCEPT
{
gridDimZ = gridDimZ_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setBlockDimX( uint32_t blockDimX_ ) VULKAN_HPP_NOEXCEPT
{
blockDimX = blockDimX_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setBlockDimY( uint32_t blockDimY_ ) VULKAN_HPP_NOEXCEPT
{
blockDimY = blockDimY_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setBlockDimZ( uint32_t blockDimZ_ ) VULKAN_HPP_NOEXCEPT
{
blockDimZ = blockDimZ_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setSharedMemBytes( uint32_t sharedMemBytes_ ) VULKAN_HPP_NOEXCEPT
{
sharedMemBytes = sharedMemBytes_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setParamCount( size_t paramCount_ ) VULKAN_HPP_NOEXCEPT
{
paramCount = paramCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setPParams( const void * const * pParams_ ) VULKAN_HPP_NOEXCEPT
{
pParams = pParams_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
CudaLaunchInfoNV & setParams( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const void * const> const & params_ ) VULKAN_HPP_NOEXCEPT
{
paramCount = params_.size();
pParams = params_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setExtraCount( size_t extraCount_ ) VULKAN_HPP_NOEXCEPT
{
extraCount = extraCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setPExtras( const void * const * pExtras_ ) VULKAN_HPP_NOEXCEPT
{
pExtras = pExtras_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
CudaLaunchInfoNV & setExtras( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const void * const> const & extras_ ) VULKAN_HPP_NOEXCEPT
{
extraCount = extras_.size();
pExtras = extras_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkCudaLaunchInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCudaLaunchInfoNV*>( this );
}
operator VkCudaLaunchInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCudaLaunchInfoNV*>( this );
}
operator VkCudaLaunchInfoNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkCudaLaunchInfoNV*>( this );
}
operator VkCudaLaunchInfoNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkCudaLaunchInfoNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::CudaFunctionNV const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, size_t const &, const void * const * const &, size_t const &, const void * const * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, function, gridDimX, gridDimY, gridDimZ, blockDimX, blockDimY, blockDimZ, sharedMemBytes, paramCount, pParams, extraCount, pExtras );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( CudaLaunchInfoNV const & ) const = default;
#else
bool operator==( CudaLaunchInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( function == rhs.function )
&& ( gridDimX == rhs.gridDimX )
&& ( gridDimY == rhs.gridDimY )
&& ( gridDimZ == rhs.gridDimZ )
&& ( blockDimX == rhs.blockDimX )
&& ( blockDimY == rhs.blockDimY )
&& ( blockDimZ == rhs.blockDimZ )
&& ( sharedMemBytes == rhs.sharedMemBytes )
&& ( paramCount == rhs.paramCount )
&& ( pParams == rhs.pParams )
&& ( extraCount == rhs.extraCount )
&& ( pExtras == rhs.pExtras );
#endif
}
bool operator!=( CudaLaunchInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCudaLaunchInfoNV;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::CudaFunctionNV function = {};
uint32_t gridDimX = {};
uint32_t gridDimY = {};
uint32_t gridDimZ = {};
uint32_t blockDimX = {};
uint32_t blockDimY = {};
uint32_t blockDimZ = {};
uint32_t sharedMemBytes = {};
size_t paramCount = {};
const void * const * pParams = {};
size_t extraCount = {};
const void * const * pExtras = {};
};
template <>
struct CppType<StructureType, StructureType::eCudaLaunchInfoNV>
{
using Type = CudaLaunchInfoNV;
};
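// Usage sketch (illustrative only, kept as a comment): records a kernel launch; as in the CUDA
// driver API, kernel arguments travel as an array of untyped pointers, and the enhanced-mode
// setParams fills paramCount and pParams together. `kernel`, the argument values aDev/bDev,
// and `cmd` (a vk::CommandBuffer in the recording state) are assumed.
//   const void * const params[] = { &aDev, &bDev };
//   vk::CudaLaunchInfoNV launchInfo = vk::CudaLaunchInfoNV{}
//                                       .setFunction( kernel )
//                                       .setGridDimX( 64 ).setGridDimY( 1 ).setGridDimZ( 1 )
//                                       .setBlockDimX( 256 ).setBlockDimY( 1 ).setBlockDimZ( 1 )
//                                       .setParams( params );
//   cmd.cudaLaunchKernelNV( launchInfo );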
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
// wrapper struct for struct VkCudaModuleCreateInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCudaModuleCreateInfoNV.html
struct CudaModuleCreateInfoNV
{
using NativeType = VkCudaModuleCreateInfoNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCudaModuleCreateInfoNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR CudaModuleCreateInfoNV(size_t dataSize_ = {}, const void * pData_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, dataSize{ dataSize_ }, pData{ pData_ }
{}
VULKAN_HPP_CONSTEXPR CudaModuleCreateInfoNV( CudaModuleCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CudaModuleCreateInfoNV( VkCudaModuleCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: CudaModuleCreateInfoNV( *reinterpret_cast<CudaModuleCreateInfoNV const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
template <typename T>
CudaModuleCreateInfoNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & data_, const void * pNext_ = nullptr )
: pNext( pNext_ ), dataSize( data_.size() * sizeof(T) ), pData( data_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
CudaModuleCreateInfoNV & operator=( CudaModuleCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
CudaModuleCreateInfoNV & operator=( VkCudaModuleCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 CudaModuleCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CudaModuleCreateInfoNV & setDataSize( size_t dataSize_ ) VULKAN_HPP_NOEXCEPT
{
dataSize = dataSize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CudaModuleCreateInfoNV & setPData( const void * pData_ ) VULKAN_HPP_NOEXCEPT
{
pData = pData_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
template <typename T>
CudaModuleCreateInfoNV & setData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & data_ ) VULKAN_HPP_NOEXCEPT
{
dataSize = data_.size() * sizeof(T);
pData = data_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkCudaModuleCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCudaModuleCreateInfoNV*>( this );
}
operator VkCudaModuleCreateInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCudaModuleCreateInfoNV*>( this );
}
operator VkCudaModuleCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkCudaModuleCreateInfoNV*>( this );
}
operator VkCudaModuleCreateInfoNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkCudaModuleCreateInfoNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, size_t const &, const void * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, dataSize, pData );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( CudaModuleCreateInfoNV const & ) const = default;
#else
bool operator==( CudaModuleCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( dataSize == rhs.dataSize )
&& ( pData == rhs.pData );
#endif
}
bool operator!=( CudaModuleCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCudaModuleCreateInfoNV;
const void * pNext = {};
size_t dataSize = {};
const void * pData = {};
};
template <>
struct CppType<StructureType, StructureType::eCudaModuleCreateInfoNV>
{
using Type = CudaModuleCreateInfoNV;
};
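// Usage sketch (illustrative only, kept as a comment): dataSize/pData describe the compiled
// CUDA binary; the enhanced-mode constructor above would instead derive dataSize from an
// ArrayProxy as data_.size() * sizeof(T). `cubin` is a hypothetical byte vector.
//   vk::CudaModuleCreateInfoNV moduleInfo = vk::CudaModuleCreateInfoNV{}
//                                             .setDataSize( cubin.size() )
//                                             .setPData( cubin.data() );
//   vk::CudaModuleNV cudaModule = device.createCudaModuleNV( moduleInfo );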
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_USE_PLATFORM_WIN32_KHR )
// wrapper struct for struct VkD3D12FenceSubmitInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkD3D12FenceSubmitInfoKHR.html
struct D3D12FenceSubmitInfoKHR
{
using NativeType = VkD3D12FenceSubmitInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eD3D12FenceSubmitInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR D3D12FenceSubmitInfoKHR(uint32_t waitSemaphoreValuesCount_ = {}, const uint64_t * pWaitSemaphoreValues_ = {}, uint32_t signalSemaphoreValuesCount_ = {}, const uint64_t * pSignalSemaphoreValues_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, waitSemaphoreValuesCount{ waitSemaphoreValuesCount_ }, pWaitSemaphoreValues{ pWaitSemaphoreValues_ }, signalSemaphoreValuesCount{ signalSemaphoreValuesCount_ }, pSignalSemaphoreValues{ pSignalSemaphoreValues_ }
{}
VULKAN_HPP_CONSTEXPR D3D12FenceSubmitInfoKHR( D3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
D3D12FenceSubmitInfoKHR( VkD3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: D3D12FenceSubmitInfoKHR( *reinterpret_cast<D3D12FenceSubmitInfoKHR const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
D3D12FenceSubmitInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & waitSemaphoreValues_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & signalSemaphoreValues_ = {}, const void * pNext_ = nullptr )
: pNext( pNext_ ), waitSemaphoreValuesCount( static_cast<uint32_t>( waitSemaphoreValues_.size() ) ), pWaitSemaphoreValues( waitSemaphoreValues_.data() ), signalSemaphoreValuesCount( static_cast<uint32_t>( signalSemaphoreValues_.size() ) ), pSignalSemaphoreValues( signalSemaphoreValues_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
D3D12FenceSubmitInfoKHR & operator=( D3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
D3D12FenceSubmitInfoKHR & operator=( VkD3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & setWaitSemaphoreValuesCount( uint32_t waitSemaphoreValuesCount_ ) VULKAN_HPP_NOEXCEPT
{
waitSemaphoreValuesCount = waitSemaphoreValuesCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & setPWaitSemaphoreValues( const uint64_t * pWaitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
{
pWaitSemaphoreValues = pWaitSemaphoreValues_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
D3D12FenceSubmitInfoKHR & setWaitSemaphoreValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & waitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
{
waitSemaphoreValuesCount = static_cast<uint32_t>( waitSemaphoreValues_.size() );
pWaitSemaphoreValues = waitSemaphoreValues_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & setSignalSemaphoreValuesCount( uint32_t signalSemaphoreValuesCount_ ) VULKAN_HPP_NOEXCEPT
{
signalSemaphoreValuesCount = signalSemaphoreValuesCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & setPSignalSemaphoreValues( const uint64_t * pSignalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
{
pSignalSemaphoreValues = pSignalSemaphoreValues_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
D3D12FenceSubmitInfoKHR & setSignalSemaphoreValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & signalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
{
signalSemaphoreValuesCount = static_cast<uint32_t>( signalSemaphoreValues_.size() );
pSignalSemaphoreValues = signalSemaphoreValues_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkD3D12FenceSubmitInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkD3D12FenceSubmitInfoKHR*>( this );
}
operator VkD3D12FenceSubmitInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkD3D12FenceSubmitInfoKHR*>( this );
}
operator VkD3D12FenceSubmitInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkD3D12FenceSubmitInfoKHR*>( this );
}
operator VkD3D12FenceSubmitInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkD3D12FenceSubmitInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const uint64_t * const &, uint32_t const &, const uint64_t * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, waitSemaphoreValuesCount, pWaitSemaphoreValues, signalSemaphoreValuesCount, pSignalSemaphoreValues );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( D3D12FenceSubmitInfoKHR const & ) const = default;
#else
bool operator==( D3D12FenceSubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( waitSemaphoreValuesCount == rhs.waitSemaphoreValuesCount )
&& ( pWaitSemaphoreValues == rhs.pWaitSemaphoreValues )
&& ( signalSemaphoreValuesCount == rhs.signalSemaphoreValuesCount )
&& ( pSignalSemaphoreValues == rhs.pSignalSemaphoreValues );
#endif
}
bool operator!=( D3D12FenceSubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eD3D12FenceSubmitInfoKHR;
const void * pNext = {};
uint32_t waitSemaphoreValuesCount = {};
const uint64_t * pWaitSemaphoreValues = {};
uint32_t signalSemaphoreValuesCount = {};
const uint64_t * pSignalSemaphoreValues = {};
};
template <>
struct CppType<StructureType, StructureType::eD3D12FenceSubmitInfoKHR>
{
using Type = D3D12FenceSubmitInfoKHR;
};
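// Usage sketch (illustrative only, kept as a comment): attaches D3D12 fence values to a queue
// submission by extending vk::SubmitInfo; each value count must match the corresponding
// semaphore count of the SubmitInfo being extended, and the enhanced-mode setters keep count
// and pointer in sync. `submitInfo` is an assumed vk::SubmitInfo.
//   uint64_t waitValues[]   = { 1 };
//   uint64_t signalValues[] = { 2 };
//   vk::D3D12FenceSubmitInfoKHR fenceValues = vk::D3D12FenceSubmitInfoKHR{}
//                                               .setWaitSemaphoreValues( waitValues )
//                                               .setSignalSemaphoreValues( signalValues );
//   submitInfo.setPNext( &fenceValues );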
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
// wrapper struct for struct VkDebugMarkerMarkerInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDebugMarkerMarkerInfoEXT.html
struct DebugMarkerMarkerInfoEXT
{
using NativeType = VkDebugMarkerMarkerInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugMarkerMarkerInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT(const char * pMarkerName_ = {}, std::array<float,4> const & color_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pMarkerName{ pMarkerName_ }, color{ color_ }
{}
VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT( DebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DebugMarkerMarkerInfoEXT( VkDebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: DebugMarkerMarkerInfoEXT( *reinterpret_cast<DebugMarkerMarkerInfoEXT const *>( &rhs ) )
{}
DebugMarkerMarkerInfoEXT & operator=( DebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DebugMarkerMarkerInfoEXT & operator=( VkDebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT & setPMarkerName( const char * pMarkerName_ ) VULKAN_HPP_NOEXCEPT
{
pMarkerName = pMarkerName_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT & setColor( std::array<float,4> color_ ) VULKAN_HPP_NOEXCEPT
{
color = color_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDebugMarkerMarkerInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDebugMarkerMarkerInfoEXT*>( this );
}
operator VkDebugMarkerMarkerInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDebugMarkerMarkerInfoEXT*>( this );
}
operator VkDebugMarkerMarkerInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDebugMarkerMarkerInfoEXT*>( this );
}
operator VkDebugMarkerMarkerInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDebugMarkerMarkerInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const char * const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pMarkerName, color );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
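// Note: the result is std::partial_ordering because the float-valued color member can compare
// unordered (NaN); pName is again compared by string content via strcmp.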
std::partial_ordering operator<=>( DebugMarkerMarkerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
if ( pMarkerName != rhs.pMarkerName )
if ( auto cmp = strcmp( pMarkerName, rhs.pMarkerName ); cmp != 0 )
return ( cmp < 0 ) ? std::partial_ordering::less : std::partial_ordering::greater;
if ( auto cmp = color <=> rhs.color; cmp != 0 ) return cmp;
return std::partial_ordering::equivalent;
}
#endif
bool operator==( DebugMarkerMarkerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( ( pMarkerName == rhs.pMarkerName ) || ( strcmp( pMarkerName, rhs.pMarkerName ) == 0 ) )
&& ( color == rhs.color );
}
bool operator!=( DebugMarkerMarkerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerMarkerInfoEXT;
const void * pNext = {};
const char * pMarkerName = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> color = {};
};
template <>
struct CppType<StructureType, StructureType::eDebugMarkerMarkerInfoEXT>
{
using Type = DebugMarkerMarkerInfoEXT;
};
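// Usage sketch (illustrative only, kept as a comment): brackets a region of a command buffer
// for tools via VK_EXT_debug_marker; the color is an optional RGBA hint. `cmd` is an assumed
// vk::CommandBuffer in the recording state.
//   vk::DebugMarkerMarkerInfoEXT marker( "shadow pass", { 0.2f, 0.2f, 0.8f, 1.0f } );
//   cmd.debugMarkerBeginEXT( marker );
//   // record the pass here
//   cmd.debugMarkerEndEXT();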
// wrapper struct for struct VkDebugMarkerObjectNameInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDebugMarkerObjectNameInfoEXT.html
struct DebugMarkerObjectNameInfoEXT
{
using NativeType = VkDebugMarkerObjectNameInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugMarkerObjectNameInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DebugMarkerObjectNameInfoEXT(VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown, uint64_t object_ = {}, const char * pObjectName_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, objectType{ objectType_ }, object{ object_ }, pObjectName{ pObjectName_ }
{}
VULKAN_HPP_CONSTEXPR DebugMarkerObjectNameInfoEXT( DebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DebugMarkerObjectNameInfoEXT( VkDebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: DebugMarkerObjectNameInfoEXT( *reinterpret_cast<DebugMarkerObjectNameInfoEXT const *>( &rhs ) )
{}
DebugMarkerObjectNameInfoEXT & operator=( DebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DebugMarkerObjectNameInfoEXT & operator=( VkDebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectNameInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectNameInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ ) VULKAN_HPP_NOEXCEPT
{
objectType = objectType_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectNameInfoEXT & setObject( uint64_t object_ ) VULKAN_HPP_NOEXCEPT
{
object = object_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectNameInfoEXT & setPObjectName( const char * pObjectName_ ) VULKAN_HPP_NOEXCEPT
{
pObjectName = pObjectName_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDebugMarkerObjectNameInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT*>( this );
}
operator VkDebugMarkerObjectNameInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDebugMarkerObjectNameInfoEXT*>( this );
}
operator VkDebugMarkerObjectNameInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT*>( this );
}
operator VkDebugMarkerObjectNameInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDebugMarkerObjectNameInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT const &, uint64_t const &, const char * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, objectType, object, pObjectName );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
std::strong_ordering operator<=>( DebugMarkerObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
if ( auto cmp = objectType <=> rhs.objectType; cmp != 0 ) return cmp;
if ( auto cmp = object <=> rhs.object; cmp != 0 ) return cmp;
if ( pObjectName != rhs.pObjectName )
if ( auto cmp = strcmp( pObjectName, rhs.pObjectName ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
return std::strong_ordering::equivalent;
}
#endif
bool operator==( DebugMarkerObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( objectType == rhs.objectType )
&& ( object == rhs.object )
&& ( ( pObjectName == rhs.pObjectName ) || ( strcmp( pObjectName, rhs.pObjectName ) == 0 ) );
}
bool operator!=( DebugMarkerObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerObjectNameInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
uint64_t object = {};
const char * pObjectName = {};
};
template <>
struct CppType<StructureType, StructureType::eDebugMarkerObjectNameInfoEXT>
{
using Type = DebugMarkerObjectNameInfoEXT;
};
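// Usage sketch (illustrative only, kept as a comment): names a raw handle for debuggers; the
// handle travels as a uint64_t alongside its DebugReportObjectTypeEXT. `buffer` is a
// hypothetical vk::Buffer.
//   vk::DebugMarkerObjectNameInfoEXT nameInfo = vk::DebugMarkerObjectNameInfoEXT{}
//     .setObjectType( vk::DebugReportObjectTypeEXT::eBuffer )
//     .setObject( uint64_t( static_cast<VkBuffer>( buffer ) ) )
//     .setPObjectName( "particle positions" );
//   device.debugMarkerSetObjectNameEXT( nameInfo );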
// wrapper struct for struct VkDebugMarkerObjectTagInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDebugMarkerObjectTagInfoEXT.html
struct DebugMarkerObjectTagInfoEXT
{
using NativeType = VkDebugMarkerObjectTagInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugMarkerObjectTagInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DebugMarkerObjectTagInfoEXT(VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown, uint64_t object_ = {}, uint64_t tagName_ = {}, size_t tagSize_ = {}, const void * pTag_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, objectType{ objectType_ }, object{ object_ }, tagName{ tagName_ }, tagSize{ tagSize_ }, pTag{ pTag_ }
{}
VULKAN_HPP_CONSTEXPR DebugMarkerObjectTagInfoEXT( DebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DebugMarkerObjectTagInfoEXT( VkDebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: DebugMarkerObjectTagInfoEXT( *reinterpret_cast<DebugMarkerObjectTagInfoEXT const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
template <typename T>
DebugMarkerObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_, uint64_t object_, uint64_t tagName_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & tag_, const void * pNext_ = nullptr )
: pNext( pNext_ ), objectType( objectType_ ), object( object_ ), tagName( tagName_ ), tagSize( tag_.size() * sizeof(T) ), pTag( tag_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
DebugMarkerObjectTagInfoEXT & operator=( DebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DebugMarkerObjectTagInfoEXT & operator=( VkDebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ ) VULKAN_HPP_NOEXCEPT
{
objectType = objectType_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & setObject( uint64_t object_ ) VULKAN_HPP_NOEXCEPT
{
object = object_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & setTagName( uint64_t tagName_ ) VULKAN_HPP_NOEXCEPT
{
tagName = tagName_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & setTagSize( size_t tagSize_ ) VULKAN_HPP_NOEXCEPT
{
tagSize = tagSize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & setPTag( const void * pTag_ ) VULKAN_HPP_NOEXCEPT
{
pTag = pTag_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
template <typename T>
DebugMarkerObjectTagInfoEXT & setTag( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & tag_ ) VULKAN_HPP_NOEXCEPT
{
tagSize = tag_.size() * sizeof(T);
pTag = tag_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDebugMarkerObjectTagInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT*>( this );
}
operator VkDebugMarkerObjectTagInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDebugMarkerObjectTagInfoEXT*>( this );
}
operator VkDebugMarkerObjectTagInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT*>( this );
}
operator VkDebugMarkerObjectTagInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDebugMarkerObjectTagInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT const &, uint64_t const &, uint64_t const &, size_t const &, const void * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, objectType, object, tagName, tagSize, pTag );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DebugMarkerObjectTagInfoEXT const & ) const = default;
#else
bool operator==( DebugMarkerObjectTagInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( objectType == rhs.objectType )
&& ( object == rhs.object )
&& ( tagName == rhs.tagName )
&& ( tagSize == rhs.tagSize )
&& ( pTag == rhs.pTag );
#endif
}
bool operator!=( DebugMarkerObjectTagInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerObjectTagInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
uint64_t object = {};
uint64_t tagName = {};
size_t tagSize = {};
const void * pTag = {};
};
template <>
struct CppType<StructureType, StructureType::eDebugMarkerObjectTagInfoEXT>
{
using Type = DebugMarkerObjectTagInfoEXT;
};
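// Usage sketch (illustrative only, kept as a comment): attaches an opaque binary tag to an
// object; tagName identifies the tag's meaning to the tool reading it. The enhanced-mode
// setTag above derives tagSize from the proxy; here the size is spelled out. `image` is a
// hypothetical vk::Image.
//   uint32_t payload[] = { 42u, 7u };
//   vk::DebugMarkerObjectTagInfoEXT tagInfo = vk::DebugMarkerObjectTagInfoEXT{}
//     .setObjectType( vk::DebugReportObjectTypeEXT::eImage )
//     .setObject( uint64_t( static_cast<VkImage>( image ) ) )
//     .setTagName( 0x1234 )
//     .setTagSize( sizeof( payload ) )
//     .setPTag( payload );
//   device.debugMarkerSetObjectTagEXT( tagInfo );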
typedef VULKAN_HPP_NAMESPACE::Bool32 (VKAPI_PTR *PFN_DebugReportCallbackEXT)( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char * pLayerPrefix, const char * pMessage, void * pUserData );
// wrapper struct for struct VkDebugReportCallbackCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDebugReportCallbackCreateInfoEXT.html
struct DebugReportCallbackCreateInfoEXT
{
using NativeType = VkDebugReportCallbackCreateInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugReportCallbackCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DebugReportCallbackCreateInfoEXT(VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags_ = {}, VULKAN_HPP_NAMESPACE::PFN_DebugReportCallbackEXT pfnCallback_ = {}, void * pUserData_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, pfnCallback{ pfnCallback_ }, pUserData{ pUserData_ }
{}
VULKAN_HPP_CONSTEXPR DebugReportCallbackCreateInfoEXT( DebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DebugReportCallbackCreateInfoEXT( VkDebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: DebugReportCallbackCreateInfoEXT( *reinterpret_cast<DebugReportCallbackCreateInfoEXT const *>( &rhs ) )
{}
#if defined( __clang__ ) || defined( __GNUC__ )
# pragma GCC diagnostic push
# if defined( __clang__ )
# pragma clang diagnostic ignored "-Wunknown-warning-option"
# endif
# pragma GCC diagnostic ignored "-Wcast-function-type"
#endif
VULKAN_HPP_DEPRECATED( "This constructor is deprecated. Use the one taking function pointer types from the vk-namespace instead." )
DebugReportCallbackCreateInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags_,
PFN_vkDebugReportCallbackEXT pfnCallback_,
void * pUserData_ = {},
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
: DebugReportCallbackCreateInfoEXT( flags_, reinterpret_cast<VULKAN_HPP_NAMESPACE::PFN_DebugReportCallbackEXT>( pfnCallback_ ), pUserData_, pNext_ )
{
}
#if defined( __clang__ ) || defined( __GNUC__ )
# pragma GCC diagnostic pop
#endif
DebugReportCallbackCreateInfoEXT & operator=( DebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DebugReportCallbackCreateInfoEXT & operator=( VkDebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DebugReportCallbackCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DebugReportCallbackCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DebugReportCallbackCreateInfoEXT & setPfnCallback( VULKAN_HPP_NAMESPACE::PFN_DebugReportCallbackEXT pfnCallback_ ) VULKAN_HPP_NOEXCEPT
{
pfnCallback = pfnCallback_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DebugReportCallbackCreateInfoEXT & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT
{
pUserData = pUserData_;
return *this;
}
#if defined( __clang__ ) || defined( __GNUC__ )
# pragma GCC diagnostic push
# if defined( __clang__ )
# pragma clang diagnostic ignored "-Wunknown-warning-option"
# endif
# pragma GCC diagnostic ignored "-Wcast-function-type"
#endif
VULKAN_HPP_DEPRECATED( "This setter is deprecated. Use the one taking a function pointer type from the vk-namespace instead." )
DebugReportCallbackCreateInfoEXT & setPfnCallback( PFN_vkDebugReportCallbackEXT pfnCallback_ ) VULKAN_HPP_NOEXCEPT
{
return setPfnCallback( reinterpret_cast<VULKAN_HPP_NAMESPACE::PFN_DebugReportCallbackEXT>( pfnCallback_ ) );
}
#if defined( __clang__ ) || defined( __GNUC__ )
# pragma GCC diagnostic pop
#endif
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDebugReportCallbackCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT*>( this );
}
operator VkDebugReportCallbackCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDebugReportCallbackCreateInfoEXT*>( this );
}
operator VkDebugReportCallbackCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT*>( this );
}
operator VkDebugReportCallbackCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDebugReportCallbackCreateInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT const &, VULKAN_HPP_NAMESPACE::PFN_DebugReportCallbackEXT const &, void * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, pfnCallback, pUserData );
}
#endif
bool operator==( DebugReportCallbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( pfnCallback == rhs.pfnCallback )
&& ( pUserData == rhs.pUserData );
#endif
}
bool operator!=( DebugReportCallbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugReportCallbackCreateInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags = {};
VULKAN_HPP_NAMESPACE::PFN_DebugReportCallbackEXT pfnCallback = {};
void * pUserData = {};
};
template <>
struct CppType<StructureType, StructureType::eDebugReportCallbackCreateInfoEXT>
{
using Type = DebugReportCallbackCreateInfoEXT;
};
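// Usage sketch (illustrative only, kept as a comment): a callback matching the
// PFN_DebugReportCallbackEXT typedef above, registered through an assumed vk::Instance
// `instance` with VK_EXT_debug_report enabled. Declaring the callback with the vk-namespace
// types avoids the deprecated PFN_vkDebugReportCallbackEXT overloads.
//   VKAPI_ATTR vk::Bool32 VKAPI_CALL reportCallback( vk::DebugReportFlagsEXT /*flags*/,
//                                                    vk::DebugReportObjectTypeEXT /*objectType*/,
//                                                    uint64_t /*object*/, size_t /*location*/,
//                                                    int32_t /*messageCode*/, const char * /*pLayerPrefix*/,
//                                                    const char * pMessage, void * /*pUserData*/ )
//   {
//     std::cerr << pMessage << '\n';
//     return VK_FALSE;   // do not abort the call that triggered the report
//   }
//   vk::DebugReportCallbackCreateInfoEXT createInfo(
//     vk::DebugReportFlagBitsEXT::eError | vk::DebugReportFlagBitsEXT::eWarning, &reportCallback );
//   vk::DebugReportCallbackEXT callback = instance.createDebugReportCallbackEXT( createInfo );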
// wrapper struct for struct VkDebugUtilsLabelEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDebugUtilsLabelEXT.html
struct DebugUtilsLabelEXT
{
using NativeType = VkDebugUtilsLabelEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsLabelEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT(const char * pLabelName_ = {}, std::array<float,4> const & color_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pLabelName{ pLabelName_ }, color{ color_ }
{}
VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT( DebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DebugUtilsLabelEXT( VkDebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: DebugUtilsLabelEXT( *reinterpret_cast<DebugUtilsLabelEXT const *>( &rhs ) )
{}
DebugUtilsLabelEXT & operator=( DebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DebugUtilsLabelEXT & operator=( VkDebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT & setPLabelName( const char * pLabelName_ ) VULKAN_HPP_NOEXCEPT
{
pLabelName = pLabelName_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT & setColor( std::array<float,4> color_ ) VULKAN_HPP_NOEXCEPT
{
color = color_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDebugUtilsLabelEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDebugUtilsLabelEXT*>( this );
}
operator VkDebugUtilsLabelEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDebugUtilsLabelEXT*>( this );
}
operator VkDebugUtilsLabelEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDebugUtilsLabelEXT*>( this );
}
operator VkDebugUtilsLabelEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDebugUtilsLabelEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const char * const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pLabelName, color );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
std::partial_ordering operator<=>( DebugUtilsLabelEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
if ( pLabelName != rhs.pLabelName )
if ( auto cmp = strcmp( pLabelName, rhs.pLabelName ); cmp != 0 )
return ( cmp < 0 ) ? std::partial_ordering::less : std::partial_ordering::greater;
if ( auto cmp = color <=> rhs.color; cmp != 0 ) return cmp;
return std::partial_ordering::equivalent;
}
#endif
bool operator==( DebugUtilsLabelEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( ( pLabelName == rhs.pLabelName ) || ( strcmp( pLabelName, rhs.pLabelName ) == 0 ) )
&& ( color == rhs.color );
}
bool operator!=( DebugUtilsLabelEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsLabelEXT;
const void * pNext = {};
const char * pLabelName = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> color = {};
};
template <>
struct CppType<StructureType, StructureType::eDebugUtilsLabelEXT>
{
using Type = DebugUtilsLabelEXT;
};
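// Usage sketch (illustrative only, kept as a comment): labels a region via VK_EXT_debug_utils;
// the same struct serves the vk::CommandBuffer and vk::Queue begin/insert/end label calls, and
// labelled regions may nest. `cmd` is an assumed vk::CommandBuffer in the recording state.
//   vk::DebugUtilsLabelEXT label( "gbuffer pass", { 0.9f, 0.6f, 0.1f, 1.0f } );
//   cmd.beginDebugUtilsLabelEXT( label );
//   // record the pass here
//   cmd.endDebugUtilsLabelEXT();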
// wrapper struct for struct VkDebugUtilsObjectNameInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDebugUtilsObjectNameInfoEXT.html
struct DebugUtilsObjectNameInfoEXT
{
using NativeType = VkDebugUtilsObjectNameInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsObjectNameInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DebugUtilsObjectNameInfoEXT(VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown, uint64_t objectHandle_ = {}, const char * pObjectName_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, objectType{ objectType_ }, objectHandle{ objectHandle_ }, pObjectName{ pObjectName_ }
{}
VULKAN_HPP_CONSTEXPR DebugUtilsObjectNameInfoEXT( DebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DebugUtilsObjectNameInfoEXT( VkDebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: DebugUtilsObjectNameInfoEXT( *reinterpret_cast<DebugUtilsObjectNameInfoEXT const *>( &rhs ) )
{}
DebugUtilsObjectNameInfoEXT & operator=( DebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DebugUtilsObjectNameInfoEXT & operator=( VkDebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectNameInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectNameInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::ObjectType objectType_ ) VULKAN_HPP_NOEXCEPT
{
objectType = objectType_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectNameInfoEXT & setObjectHandle( uint64_t objectHandle_ ) VULKAN_HPP_NOEXCEPT
{
objectHandle = objectHandle_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectNameInfoEXT & setPObjectName( const char * pObjectName_ ) VULKAN_HPP_NOEXCEPT
{
pObjectName = pObjectName_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDebugUtilsObjectNameInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT*>( this );
}
operator VkDebugUtilsObjectNameInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDebugUtilsObjectNameInfoEXT*>( this );
}
operator VkDebugUtilsObjectNameInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT*>( this );
}
operator VkDebugUtilsObjectNameInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDebugUtilsObjectNameInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ObjectType const &, uint64_t const &, const char * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, objectType, objectHandle, pObjectName );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
std::strong_ordering operator<=>( DebugUtilsObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
if ( auto cmp = objectType <=> rhs.objectType; cmp != 0 ) return cmp;
if ( auto cmp = objectHandle <=> rhs.objectHandle; cmp != 0 ) return cmp;
if ( pObjectName != rhs.pObjectName )
if ( auto cmp = strcmp( pObjectName, rhs.pObjectName ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
return std::strong_ordering::equivalent;
}
#endif
bool operator==( DebugUtilsObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( objectType == rhs.objectType )
&& ( objectHandle == rhs.objectHandle )
&& ( ( pObjectName == rhs.pObjectName ) || ( strcmp( pObjectName, rhs.pObjectName ) == 0 ) );
}
bool operator!=( DebugUtilsObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsObjectNameInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown;
uint64_t objectHandle = {};
const char * pObjectName = {};
};
template <>
struct CppType<StructureType, StructureType::eDebugUtilsObjectNameInfoEXT>
{
using Type = DebugUtilsObjectNameInfoEXT;
};
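// Usage sketch (illustrative only, kept as a comment): the VK_EXT_debug_utils counterpart of
// DebugMarkerObjectNameInfoEXT above, keyed on the core vk::ObjectType enumeration rather than
// DebugReportObjectTypeEXT. `image` is a hypothetical vk::Image.
//   vk::DebugUtilsObjectNameInfoEXT nameInfo = vk::DebugUtilsObjectNameInfoEXT{}
//     .setObjectType( vk::ObjectType::eImage )
//     .setObjectHandle( uint64_t( static_cast<VkImage>( image ) ) )
//     .setPObjectName( "gbuffer albedo" );
//   device.setDebugUtilsObjectNameEXT( nameInfo );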
// wrapper struct for struct VkDebugUtilsMessengerCallbackDataEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDebugUtilsMessengerCallbackDataEXT.html
struct DebugUtilsMessengerCallbackDataEXT
{
using NativeType = VkDebugUtilsMessengerCallbackDataEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsMessengerCallbackDataEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT(VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_ = {}, const char * pMessageIdName_ = {}, int32_t messageIdNumber_ = {}, const char * pMessage_ = {}, uint32_t queueLabelCount_ = {}, const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pQueueLabels_ = {}, uint32_t cmdBufLabelCount_ = {}, const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pCmdBufLabels_ = {}, uint32_t objectCount_ = {}, const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pObjects_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, pMessageIdName{ pMessageIdName_ }, messageIdNumber{ messageIdNumber_ }, pMessage{ pMessage_ }, queueLabelCount{ queueLabelCount_ }, pQueueLabels{ pQueueLabels_ }, cmdBufLabelCount{ cmdBufLabelCount_ }, pCmdBufLabels{ pCmdBufLabels_ }, objectCount{ objectCount_ }, pObjects{ pObjects_ }
{}
VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT( DebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DebugUtilsMessengerCallbackDataEXT( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: DebugUtilsMessengerCallbackDataEXT( *reinterpret_cast<DebugUtilsMessengerCallbackDataEXT const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
DebugUtilsMessengerCallbackDataEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_, const char * pMessageIdName_, int32_t messageIdNumber_, const char * pMessage_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT> const & queueLabels_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT> const & cmdBufLabels_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT> const & objects_ = {}, const void * pNext_ = nullptr )
: pNext( pNext_ ), flags( flags_ ), pMessageIdName( pMessageIdName_ ), messageIdNumber( messageIdNumber_ ), pMessage( pMessage_ ), queueLabelCount( static_cast<uint32_t>( queueLabels_.size() ) ), pQueueLabels( queueLabels_.data() ), cmdBufLabelCount( static_cast<uint32_t>( cmdBufLabels_.size() ) ), pCmdBufLabels( cmdBufLabels_.data() ), objectCount( static_cast<uint32_t>( objects_.size() ) ), pObjects( objects_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
DebugUtilsMessengerCallbackDataEXT & operator=( DebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DebugUtilsMessengerCallbackDataEXT & operator=( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setFlags( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setPMessageIdName( const char * pMessageIdName_ ) VULKAN_HPP_NOEXCEPT
{
pMessageIdName = pMessageIdName_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setMessageIdNumber( int32_t messageIdNumber_ ) VULKAN_HPP_NOEXCEPT
{
messageIdNumber = messageIdNumber_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setPMessage( const char * pMessage_ ) VULKAN_HPP_NOEXCEPT
{
pMessage = pMessage_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setQueueLabelCount( uint32_t queueLabelCount_ ) VULKAN_HPP_NOEXCEPT
{
queueLabelCount = queueLabelCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setPQueueLabels( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pQueueLabels_ ) VULKAN_HPP_NOEXCEPT
{
pQueueLabels = pQueueLabels_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
DebugUtilsMessengerCallbackDataEXT & setQueueLabels( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT> const & queueLabels_ ) VULKAN_HPP_NOEXCEPT
{
queueLabelCount = static_cast<uint32_t>( queueLabels_.size() );
pQueueLabels = queueLabels_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setCmdBufLabelCount( uint32_t cmdBufLabelCount_ ) VULKAN_HPP_NOEXCEPT
{
cmdBufLabelCount = cmdBufLabelCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setPCmdBufLabels( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pCmdBufLabels_ ) VULKAN_HPP_NOEXCEPT
{
pCmdBufLabels = pCmdBufLabels_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
DebugUtilsMessengerCallbackDataEXT & setCmdBufLabels( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT> const & cmdBufLabels_ ) VULKAN_HPP_NOEXCEPT
{
cmdBufLabelCount = static_cast<uint32_t>( cmdBufLabels_.size() );
pCmdBufLabels = cmdBufLabels_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setObjectCount( uint32_t objectCount_ ) VULKAN_HPP_NOEXCEPT
{
objectCount = objectCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setPObjects( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pObjects_ ) VULKAN_HPP_NOEXCEPT
{
pObjects = pObjects_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
DebugUtilsMessengerCallbackDataEXT & setObjects( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT> const & objects_ ) VULKAN_HPP_NOEXCEPT
{
objectCount = static_cast<uint32_t>( objects_.size() );
pObjects = objects_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDebugUtilsMessengerCallbackDataEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT*>( this );
}
operator VkDebugUtilsMessengerCallbackDataEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDebugUtilsMessengerCallbackDataEXT*>( this );
}
operator VkDebugUtilsMessengerCallbackDataEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT*>( this );
}
operator VkDebugUtilsMessengerCallbackDataEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDebugUtilsMessengerCallbackDataEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT const &, const char * const &, int32_t const &, const char * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, pMessageIdName, messageIdNumber, pMessage, queueLabelCount, pQueueLabels, cmdBufLabelCount, pCmdBufLabels, objectCount, pObjects );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
std::strong_ordering operator<=>( DebugUtilsMessengerCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) return cmp;
// the two strings compare by content: equal pointers (including both null) compare equal,
// otherwise they are ordered lexicographically via strcmp (the same applies to pMessage below)
if ( pMessageIdName != rhs.pMessageIdName )
  if ( auto cmp = strcmp( pMessageIdName, rhs.pMessageIdName ); cmp != 0 )
    return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
if ( auto cmp = messageIdNumber <=> rhs.messageIdNumber; cmp != 0 ) return cmp;
if ( pMessage != rhs.pMessage )
if ( auto cmp = strcmp( pMessage, rhs.pMessage ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
if ( auto cmp = queueLabelCount <=> rhs.queueLabelCount; cmp != 0 ) return cmp;
if ( auto cmp = pQueueLabels <=> rhs.pQueueLabels; cmp != 0 ) return cmp;
if ( auto cmp = cmdBufLabelCount <=> rhs.cmdBufLabelCount; cmp != 0 ) return cmp;
if ( auto cmp = pCmdBufLabels <=> rhs.pCmdBufLabels; cmp != 0 ) return cmp;
if ( auto cmp = objectCount <=> rhs.objectCount; cmp != 0 ) return cmp;
if ( auto cmp = pObjects <=> rhs.pObjects; cmp != 0 ) return cmp;
return std::strong_ordering::equivalent;
}
#endif
// note: the two string members compare by content, while all other pointer members
// (including the label and object arrays) compare shallowly by address
bool operator==( DebugUtilsMessengerCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( ( pMessageIdName == rhs.pMessageIdName ) || ( strcmp( pMessageIdName, rhs.pMessageIdName ) == 0 ) )
&& ( messageIdNumber == rhs.messageIdNumber )
&& ( ( pMessage == rhs.pMessage ) || ( strcmp( pMessage, rhs.pMessage ) == 0 ) )
&& ( queueLabelCount == rhs.queueLabelCount )
&& ( pQueueLabels == rhs.pQueueLabels )
&& ( cmdBufLabelCount == rhs.cmdBufLabelCount )
&& ( pCmdBufLabels == rhs.pCmdBufLabels )
&& ( objectCount == rhs.objectCount )
&& ( pObjects == rhs.pObjects );
}
bool operator!=( DebugUtilsMessengerCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsMessengerCallbackDataEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags = {};
const char * pMessageIdName = {};
int32_t messageIdNumber = {};
const char * pMessage = {};
uint32_t queueLabelCount = {};
const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pQueueLabels = {};
uint32_t cmdBufLabelCount = {};
const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pCmdBufLabels = {};
uint32_t objectCount = {};
const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pObjects = {};
};
template <>
struct CppType<StructureType, StructureType::eDebugUtilsMessengerCallbackDataEXT>
{
using Type = DebugUtilsMessengerCallbackDataEXT;
};
typedef VULKAN_HPP_NAMESPACE::Bool32 (VKAPI_PTR *PFN_DebugUtilsMessengerCallbackEXT)( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT * pCallbackData, void * pUserData );
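// Usage sketch (illustrative comment only, not generated API): a user-provided callback matching
// the PFN_DebugUtilsMessengerCallbackEXT typedef above, assuming the default `vk` namespace; the
// function name `debugCallback` is hypothetical.
//
//   VKAPI_ATTR vk::Bool32 VKAPI_CALL debugCallback( vk::DebugUtilsMessageSeverityFlagBitsEXT severity,
//                                                   vk::DebugUtilsMessageTypeFlagsEXT        types,
//                                                   const vk::DebugUtilsMessengerCallbackDataEXT * pCallbackData,
//                                                   void * /*pUserData*/ )
//   {
//     std::cerr << vk::to_string( severity ) << ": " << pCallbackData->pMessage << '\n';
//     return VK_FALSE;  // never abort the call that triggered the message
//   }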
// wrapper struct for struct VkDebugUtilsMessengerCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDebugUtilsMessengerCreateInfoEXT.html
struct DebugUtilsMessengerCreateInfoEXT
{
using NativeType = VkDebugUtilsMessengerCreateInfoEXT;
static const bool allowDuplicate = true;  // several of these structs may legally appear in a single pNext chain (e.g. on InstanceCreateInfo)
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsMessengerCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DebugUtilsMessengerCreateInfoEXT(VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags_ = {}, VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity_ = {}, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType_ = {}, VULKAN_HPP_NAMESPACE::PFN_DebugUtilsMessengerCallbackEXT pfnUserCallback_ = {}, void * pUserData_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, messageSeverity{ messageSeverity_ }, messageType{ messageType_ }, pfnUserCallback{ pfnUserCallback_ }, pUserData{ pUserData_ }
{}
VULKAN_HPP_CONSTEXPR DebugUtilsMessengerCreateInfoEXT( DebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DebugUtilsMessengerCreateInfoEXT( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: DebugUtilsMessengerCreateInfoEXT( *reinterpret_cast<DebugUtilsMessengerCreateInfoEXT const *>( &rhs ) )
{}
#if defined( __clang__ ) || defined( __GNUC__ )
# pragma GCC diagnostic push
# if defined( __clang__ )
# pragma clang diagnostic ignored "-Wunknown-warning-option"
# endif
# pragma GCC diagnostic ignored "-Wcast-function-type"
#endif
VULKAN_HPP_DEPRECATED( "This constructor is deprecated. Use the one taking function pointer types from the vk-namespace instead." )
DebugUtilsMessengerCreateInfoEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags_,
VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity_,
VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType_,
PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_,
void * pUserData_ = {},
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
// the vk-namespace callback type is layout-compatible with the C type PFN_vkDebugUtilsMessengerCallbackEXT,
// so this function-pointer cast is safe; the pragmas above merely silence -Wcast-function-type for it
: DebugUtilsMessengerCreateInfoEXT( flags_, messageSeverity_, messageType_, reinterpret_cast<VULKAN_HPP_NAMESPACE::PFN_DebugUtilsMessengerCallbackEXT>( pfnUserCallback_ ), pUserData_, pNext_ )
{
}
#if defined( __clang__ ) || defined( __GNUC__ )
# pragma GCC diagnostic pop
#endif
DebugUtilsMessengerCreateInfoEXT & operator=( DebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DebugUtilsMessengerCreateInfoEXT & operator=( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & setMessageSeverity( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity_ ) VULKAN_HPP_NOEXCEPT
{
messageSeverity = messageSeverity_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & setMessageType( VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType_ ) VULKAN_HPP_NOEXCEPT
{
messageType = messageType_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & setPfnUserCallback( VULKAN_HPP_NAMESPACE::PFN_DebugUtilsMessengerCallbackEXT pfnUserCallback_ ) VULKAN_HPP_NOEXCEPT
{
pfnUserCallback = pfnUserCallback_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT
{
pUserData = pUserData_;
return *this;
}
#if defined( __clang__ ) || defined( __GNUC__ )
# pragma GCC diagnostic push
# if defined( __clang__ )
# pragma clang diagnostic ignored "-Wunknown-warning-option"
# endif
# pragma GCC diagnostic ignored "-Wcast-function-type"
#endif
VULKAN_HPP_DEPRECATED( "This setter is deprecated. Use the one taking a function pointer type from the vk-namespace instead." )
DebugUtilsMessengerCreateInfoEXT & setPfnUserCallback( PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ ) VULKAN_HPP_NOEXCEPT
{
return setPfnUserCallback( reinterpret_cast<VULKAN_HPP_NAMESPACE::PFN_DebugUtilsMessengerCallbackEXT>( pfnUserCallback_ ) );
}
#if defined( __clang__ ) || defined( __GNUC__ )
# pragma GCC diagnostic pop
#endif
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDebugUtilsMessengerCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT*>( this );
}
operator VkDebugUtilsMessengerCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDebugUtilsMessengerCreateInfoEXT*>( this );
}
operator VkDebugUtilsMessengerCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT*>( this );
}
operator VkDebugUtilsMessengerCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDebugUtilsMessengerCreateInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT const &, VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT const &, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT const &, VULKAN_HPP_NAMESPACE::PFN_DebugUtilsMessengerCallbackEXT const &, void * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, messageSeverity, messageType, pfnUserCallback, pUserData );
}
#endif
bool operator==( DebugUtilsMessengerCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( messageSeverity == rhs.messageSeverity )
&& ( messageType == rhs.messageType )
&& ( pfnUserCallback == rhs.pfnUserCallback )
&& ( pUserData == rhs.pUserData );
#endif
}
bool operator!=( DebugUtilsMessengerCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsMessengerCreateInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags = {};
VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity = {};
VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType = {};
VULKAN_HPP_NAMESPACE::PFN_DebugUtilsMessengerCallbackEXT pfnUserCallback = {};
void * pUserData = {};
};
template <>
struct CppType<StructureType, StructureType::eDebugUtilsMessengerCreateInfoEXT>
{
using Type = DebugUtilsMessengerCreateInfoEXT;
};
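// Usage sketch (illustrative comment only): populating the create info with the fluent setters,
// assuming the default `vk` namespace, an existing `instance`, and the hypothetical `debugCallback`
// shown above; requires enhanced mode and smart handles.
//
//   auto createInfo = vk::DebugUtilsMessengerCreateInfoEXT{}
//                       .setMessageSeverity( vk::DebugUtilsMessageSeverityFlagBitsEXT::eWarning
//                                            | vk::DebugUtilsMessageSeverityFlagBitsEXT::eError )
//                       .setMessageType( vk::DebugUtilsMessageTypeFlagBitsEXT::eGeneral
//                                        | vk::DebugUtilsMessageTypeFlagBitsEXT::eValidation )
//                       .setPfnUserCallback( &debugCallback );
//   vk::UniqueDebugUtilsMessengerEXT messenger = instance.createDebugUtilsMessengerEXTUnique( createInfo );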
// wrapper struct for struct VkDebugUtilsObjectTagInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDebugUtilsObjectTagInfoEXT.html
struct DebugUtilsObjectTagInfoEXT
{
using NativeType = VkDebugUtilsObjectTagInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsObjectTagInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DebugUtilsObjectTagInfoEXT(VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown, uint64_t objectHandle_ = {}, uint64_t tagName_ = {}, size_t tagSize_ = {}, const void * pTag_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, objectType{ objectType_ }, objectHandle{ objectHandle_ }, tagName{ tagName_ }, tagSize{ tagSize_ }, pTag{ pTag_ }
{}
VULKAN_HPP_CONSTEXPR DebugUtilsObjectTagInfoEXT( DebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DebugUtilsObjectTagInfoEXT( VkDebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: DebugUtilsObjectTagInfoEXT( *reinterpret_cast<DebugUtilsObjectTagInfoEXT const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
template <typename T>
DebugUtilsObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, uint64_t objectHandle_, uint64_t tagName_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & tag_, const void * pNext_ = nullptr )
: pNext( pNext_ ), objectType( objectType_ ), objectHandle( objectHandle_ ), tagName( tagName_ ), tagSize( tag_.size() * sizeof(T) ), pTag( tag_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
DebugUtilsObjectTagInfoEXT & operator=( DebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DebugUtilsObjectTagInfoEXT & operator=( VkDebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::ObjectType objectType_ ) VULKAN_HPP_NOEXCEPT
{
objectType = objectType_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setObjectHandle( uint64_t objectHandle_ ) VULKAN_HPP_NOEXCEPT
{
objectHandle = objectHandle_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setTagName( uint64_t tagName_ ) VULKAN_HPP_NOEXCEPT
{
tagName = tagName_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setTagSize( size_t tagSize_ ) VULKAN_HPP_NOEXCEPT
{
tagSize = tagSize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setPTag( const void * pTag_ ) VULKAN_HPP_NOEXCEPT
{
pTag = pTag_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
template <typename T>
DebugUtilsObjectTagInfoEXT & setTag( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & tag_ ) VULKAN_HPP_NOEXCEPT
{
  tagSize = tag_.size() * sizeof(T);  // tagSize is a byte count, hence the sizeof scaling
pTag = tag_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDebugUtilsObjectTagInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT*>( this );
}
operator VkDebugUtilsObjectTagInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDebugUtilsObjectTagInfoEXT*>( this );
}
operator VkDebugUtilsObjectTagInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT*>( this );
}
operator VkDebugUtilsObjectTagInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDebugUtilsObjectTagInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ObjectType const &, uint64_t const &, uint64_t const &, size_t const &, const void * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, objectType, objectHandle, tagName, tagSize, pTag );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DebugUtilsObjectTagInfoEXT const & ) const = default;
#else
bool operator==( DebugUtilsObjectTagInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( objectType == rhs.objectType )
&& ( objectHandle == rhs.objectHandle )
&& ( tagName == rhs.tagName )
&& ( tagSize == rhs.tagSize )
&& ( pTag == rhs.pTag );
#endif
}
bool operator!=( DebugUtilsObjectTagInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsObjectTagInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown;
uint64_t objectHandle = {};
uint64_t tagName = {};
size_t tagSize = {};
const void * pTag = {};
};
template <>
struct CppType<StructureType, StructureType::eDebugUtilsObjectTagInfoEXT>
{
using Type = DebugUtilsObjectTagInfoEXT;
};
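// Usage sketch (illustrative comment only): attaching a binary tag to a buffer; in enhanced mode
// setTag() derives tagSize (in bytes) and pTag from the array proxy. `device`, `buffer` and the
// tag name 0x1 are hypothetical; the element type is passed explicitly since it cannot be deduced
// through the ArrayProxyNoTemporaries conversion.
//
//   std::array<uint32_t, 2> tagData = { 42u, 7u };
//   auto tagInfo = vk::DebugUtilsObjectTagInfoEXT{}
//                    .setObjectType( vk::ObjectType::eBuffer )
//                    .setObjectHandle( uint64_t( static_cast<VkBuffer>( buffer ) ) )
//                    .setTagName( 0x1 )
//                    .setTag<uint32_t>( tagData );
//   device.setDebugUtilsObjectTagEXT( tagInfo );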
// wrapper struct for struct VkDecompressMemoryRegionNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDecompressMemoryRegionNV.html
struct DecompressMemoryRegionNV
{
using NativeType = VkDecompressMemoryRegionNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DecompressMemoryRegionNV(VULKAN_HPP_NAMESPACE::DeviceAddress srcAddress_ = {}, VULKAN_HPP_NAMESPACE::DeviceAddress dstAddress_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize compressedSize_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize decompressedSize_ = {}, VULKAN_HPP_NAMESPACE::MemoryDecompressionMethodFlagsNV decompressionMethod_ = {}) VULKAN_HPP_NOEXCEPT
: srcAddress{ srcAddress_ }, dstAddress{ dstAddress_ }, compressedSize{ compressedSize_ }, decompressedSize{ decompressedSize_ }, decompressionMethod{ decompressionMethod_ }
{}
VULKAN_HPP_CONSTEXPR DecompressMemoryRegionNV( DecompressMemoryRegionNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DecompressMemoryRegionNV( VkDecompressMemoryRegionNV const & rhs ) VULKAN_HPP_NOEXCEPT
: DecompressMemoryRegionNV( *reinterpret_cast<DecompressMemoryRegionNV const *>( &rhs ) )
{}
DecompressMemoryRegionNV & operator=( DecompressMemoryRegionNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DecompressMemoryRegionNV & operator=( VkDecompressMemoryRegionNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DecompressMemoryRegionNV & setSrcAddress( VULKAN_HPP_NAMESPACE::DeviceAddress srcAddress_ ) VULKAN_HPP_NOEXCEPT
{
srcAddress = srcAddress_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DecompressMemoryRegionNV & setDstAddress( VULKAN_HPP_NAMESPACE::DeviceAddress dstAddress_ ) VULKAN_HPP_NOEXCEPT
{
dstAddress = dstAddress_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DecompressMemoryRegionNV & setCompressedSize( VULKAN_HPP_NAMESPACE::DeviceSize compressedSize_ ) VULKAN_HPP_NOEXCEPT
{
compressedSize = compressedSize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DecompressMemoryRegionNV & setDecompressedSize( VULKAN_HPP_NAMESPACE::DeviceSize decompressedSize_ ) VULKAN_HPP_NOEXCEPT
{
decompressedSize = decompressedSize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DecompressMemoryRegionNV & setDecompressionMethod( VULKAN_HPP_NAMESPACE::MemoryDecompressionMethodFlagsNV decompressionMethod_ ) VULKAN_HPP_NOEXCEPT
{
decompressionMethod = decompressionMethod_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDecompressMemoryRegionNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDecompressMemoryRegionNV*>( this );
}
operator VkDecompressMemoryRegionNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDecompressMemoryRegionNV*>( this );
}
operator VkDecompressMemoryRegionNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDecompressMemoryRegionNV*>( this );
}
operator VkDecompressMemoryRegionNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDecompressMemoryRegionNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::MemoryDecompressionMethodFlagsNV const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( srcAddress, dstAddress, compressedSize, decompressedSize, decompressionMethod );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DecompressMemoryRegionNV const & ) const = default;
#else
bool operator==( DecompressMemoryRegionNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( srcAddress == rhs.srcAddress )
&& ( dstAddress == rhs.dstAddress )
&& ( compressedSize == rhs.compressedSize )
&& ( decompressedSize == rhs.decompressedSize )
&& ( decompressionMethod == rhs.decompressionMethod );
#endif
}
bool operator!=( DecompressMemoryRegionNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::DeviceAddress srcAddress = {};
VULKAN_HPP_NAMESPACE::DeviceAddress dstAddress = {};
VULKAN_HPP_NAMESPACE::DeviceSize compressedSize = {};
VULKAN_HPP_NAMESPACE::DeviceSize decompressedSize = {};
VULKAN_HPP_NAMESPACE::MemoryDecompressionMethodFlagsNV decompressionMethod = {};
};
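// Usage sketch (illustrative comment only): describing one region for CommandBuffer::decompressMemoryNV,
// assuming hypothetical device addresses `srcVa` / `dstVa` (e.g. from Device::getBufferAddress) and
// hypothetical sizes `compressedBytes` / `rawBytes`.
//
//   auto region = vk::DecompressMemoryRegionNV{}
//                   .setSrcAddress( srcVa )
//                   .setDstAddress( dstVa )
//                   .setCompressedSize( compressedBytes )
//                   .setDecompressedSize( rawBytes )
//                   .setDecompressionMethod( vk::MemoryDecompressionMethodFlagBitsNV::eGdeflate10 );
//   commandBuffer.decompressMemoryNV( region );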
// wrapper struct for struct VkDedicatedAllocationBufferCreateInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDedicatedAllocationBufferCreateInfoNV.html
struct DedicatedAllocationBufferCreateInfoNV
{
using NativeType = VkDedicatedAllocationBufferCreateInfoNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDedicatedAllocationBufferCreateInfoNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DedicatedAllocationBufferCreateInfoNV(VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, dedicatedAllocation{ dedicatedAllocation_ }
{}
VULKAN_HPP_CONSTEXPR DedicatedAllocationBufferCreateInfoNV( DedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DedicatedAllocationBufferCreateInfoNV( VkDedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: DedicatedAllocationBufferCreateInfoNV( *reinterpret_cast<DedicatedAllocationBufferCreateInfoNV const *>( &rhs ) )
{}
DedicatedAllocationBufferCreateInfoNV & operator=( DedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DedicatedAllocationBufferCreateInfoNV & operator=( VkDedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationBufferCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationBufferCreateInfoNV & setDedicatedAllocation( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ ) VULKAN_HPP_NOEXCEPT
{
dedicatedAllocation = dedicatedAllocation_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDedicatedAllocationBufferCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDedicatedAllocationBufferCreateInfoNV*>( this );
}
operator VkDedicatedAllocationBufferCreateInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDedicatedAllocationBufferCreateInfoNV*>( this );
}
operator VkDedicatedAllocationBufferCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDedicatedAllocationBufferCreateInfoNV*>( this );
}
operator VkDedicatedAllocationBufferCreateInfoNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDedicatedAllocationBufferCreateInfoNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, dedicatedAllocation );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DedicatedAllocationBufferCreateInfoNV const & ) const = default;
#else
bool operator==( DedicatedAllocationBufferCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( dedicatedAllocation == rhs.dedicatedAllocation );
#endif
}
bool operator!=( DedicatedAllocationBufferCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationBufferCreateInfoNV;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation = {};
};
template <>
struct CppType<StructureType, StructureType::eDedicatedAllocationBufferCreateInfoNV>
{
using Type = DedicatedAllocationBufferCreateInfoNV;
};
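// Usage sketch (illustrative comment only): requesting a dedicated allocation for a buffer by
// chaining this struct into BufferCreateInfo via a StructureChain; `device` and `size` are hypothetical.
//
//   vk::StructureChain<vk::BufferCreateInfo, vk::DedicatedAllocationBufferCreateInfoNV> chain{
//     vk::BufferCreateInfo{ {}, size, vk::BufferUsageFlagBits::eStorageBuffer },
//     vk::DedicatedAllocationBufferCreateInfoNV{ VK_TRUE }
//   };
//   vk::Buffer buffer = device.createBuffer( chain.get<vk::BufferCreateInfo>() );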
// wrapper struct for struct VkDedicatedAllocationImageCreateInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDedicatedAllocationImageCreateInfoNV.html
struct DedicatedAllocationImageCreateInfoNV
{
using NativeType = VkDedicatedAllocationImageCreateInfoNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDedicatedAllocationImageCreateInfoNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DedicatedAllocationImageCreateInfoNV(VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, dedicatedAllocation{ dedicatedAllocation_ }
{}
VULKAN_HPP_CONSTEXPR DedicatedAllocationImageCreateInfoNV( DedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DedicatedAllocationImageCreateInfoNV( VkDedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: DedicatedAllocationImageCreateInfoNV( *reinterpret_cast<DedicatedAllocationImageCreateInfoNV const *>( &rhs ) )
{}
DedicatedAllocationImageCreateInfoNV & operator=( DedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DedicatedAllocationImageCreateInfoNV & operator=( VkDedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationImageCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationImageCreateInfoNV & setDedicatedAllocation( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ ) VULKAN_HPP_NOEXCEPT
{
dedicatedAllocation = dedicatedAllocation_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDedicatedAllocationImageCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDedicatedAllocationImageCreateInfoNV*>( this );
}
operator VkDedicatedAllocationImageCreateInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDedicatedAllocationImageCreateInfoNV*>( this );
}
operator VkDedicatedAllocationImageCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDedicatedAllocationImageCreateInfoNV*>( this );
}
operator VkDedicatedAllocationImageCreateInfoNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDedicatedAllocationImageCreateInfoNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, dedicatedAllocation );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DedicatedAllocationImageCreateInfoNV const & ) const = default;
#else
bool operator==( DedicatedAllocationImageCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( dedicatedAllocation == rhs.dedicatedAllocation );
#endif
}
bool operator!=( DedicatedAllocationImageCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationImageCreateInfoNV;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation = {};
};
template <>
struct CppType<StructureType, StructureType::eDedicatedAllocationImageCreateInfoNV>
{
using Type = DedicatedAllocationImageCreateInfoNV;
};
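// Usage sketch (illustrative comment only): the image analogue of the buffer case above;
// `imageCreateInfo` is a hypothetical, fully populated vk::ImageCreateInfo.
//
//   vk::StructureChain<vk::ImageCreateInfo, vk::DedicatedAllocationImageCreateInfoNV> chain{
//     imageCreateInfo,
//     vk::DedicatedAllocationImageCreateInfoNV{ VK_TRUE }
//   };
//   vk::Image image = device.createImage( chain.get<vk::ImageCreateInfo>() );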
// wrapper struct for struct VkDedicatedAllocationMemoryAllocateInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDedicatedAllocationMemoryAllocateInfoNV.html
struct DedicatedAllocationMemoryAllocateInfoNV
{
using NativeType = VkDedicatedAllocationMemoryAllocateInfoNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDedicatedAllocationMemoryAllocateInfoNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DedicatedAllocationMemoryAllocateInfoNV(VULKAN_HPP_NAMESPACE::Image image_ = {}, VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, image{ image_ }, buffer{ buffer_ }
{}
VULKAN_HPP_CONSTEXPR DedicatedAllocationMemoryAllocateInfoNV( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DedicatedAllocationMemoryAllocateInfoNV( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: DedicatedAllocationMemoryAllocateInfoNV( *reinterpret_cast<DedicatedAllocationMemoryAllocateInfoNV const *>( &rhs ) )
{}
DedicatedAllocationMemoryAllocateInfoNV & operator=( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DedicatedAllocationMemoryAllocateInfoNV & operator=( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationMemoryAllocateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationMemoryAllocateInfoNV & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
{
image = image_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationMemoryAllocateInfoNV & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
{
buffer = buffer_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDedicatedAllocationMemoryAllocateInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDedicatedAllocationMemoryAllocateInfoNV*>( this );
}
operator VkDedicatedAllocationMemoryAllocateInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDedicatedAllocationMemoryAllocateInfoNV*>( this );
}
operator VkDedicatedAllocationMemoryAllocateInfoNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDedicatedAllocationMemoryAllocateInfoNV*>( this );
}
operator VkDedicatedAllocationMemoryAllocateInfoNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDedicatedAllocationMemoryAllocateInfoNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Image const &, VULKAN_HPP_NAMESPACE::Buffer const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, image, buffer );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DedicatedAllocationMemoryAllocateInfoNV const & ) const = default;
#else
bool operator==( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( image == rhs.image )
&& ( buffer == rhs.buffer );
#endif
}
bool operator!=( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationMemoryAllocateInfoNV;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Image image = {};
VULKAN_HPP_NAMESPACE::Buffer buffer = {};
};
template <>
struct CppType<StructureType, StructureType::eDedicatedAllocationMemoryAllocateInfoNV>
{
using Type = DedicatedAllocationMemoryAllocateInfoNV;
};
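// Usage sketch (illustrative comment only): allocating memory dedicated to the buffer created above;
// at most one of image / buffer may be a valid handle, the other stays null. `memoryTypeIndex` is a
// hypothetical index selected from the buffer's memory requirements.
//
//   vk::MemoryRequirements reqs = device.getBufferMemoryRequirements( buffer );
//   vk::StructureChain<vk::MemoryAllocateInfo, vk::DedicatedAllocationMemoryAllocateInfoNV> chain{
//     vk::MemoryAllocateInfo{ reqs.size, memoryTypeIndex },
//     vk::DedicatedAllocationMemoryAllocateInfoNV{ {}, buffer }
//   };
//   vk::DeviceMemory memory = device.allocateMemory( chain.get<vk::MemoryAllocateInfo>() );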
// wrapper struct for struct VkMemoryBarrier2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryBarrier2.html
struct MemoryBarrier2
{
using NativeType = VkMemoryBarrier2;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryBarrier2;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR MemoryBarrier2(VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, srcStageMask{ srcStageMask_ }, srcAccessMask{ srcAccessMask_ }, dstStageMask{ dstStageMask_ }, dstAccessMask{ dstAccessMask_ }
{}
VULKAN_HPP_CONSTEXPR MemoryBarrier2( MemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MemoryBarrier2( VkMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT
: MemoryBarrier2( *reinterpret_cast<MemoryBarrier2 const *>( &rhs ) )
{}
MemoryBarrier2 & operator=( MemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
MemoryBarrier2 & operator=( VkMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryBarrier2 const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ ) VULKAN_HPP_NOEXCEPT
{
srcStageMask = srcStageMask_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
{
srcAccessMask = srcAccessMask_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ ) VULKAN_HPP_NOEXCEPT
{
dstStageMask = dstStageMask_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
{
dstAccessMask = dstAccessMask_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkMemoryBarrier2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMemoryBarrier2*>( this );
}
operator VkMemoryBarrier2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMemoryBarrier2*>( this );
}
operator VkMemoryBarrier2 const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkMemoryBarrier2*>( this );
}
operator VkMemoryBarrier2 *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkMemoryBarrier2*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 const &, VULKAN_HPP_NAMESPACE::AccessFlags2 const &, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 const &, VULKAN_HPP_NAMESPACE::AccessFlags2 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, srcStageMask, srcAccessMask, dstStageMask, dstAccessMask );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( MemoryBarrier2 const & ) const = default;
#else
bool operator==( MemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( srcStageMask == rhs.srcStageMask )
&& ( srcAccessMask == rhs.srcAccessMask )
&& ( dstStageMask == rhs.dstStageMask )
&& ( dstAccessMask == rhs.dstAccessMask );
#endif
}
bool operator!=( MemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryBarrier2;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask = {};
VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask = {};
VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask = {};
VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask = {};
};
template <>
struct CppType<StructureType, StructureType::eMemoryBarrier2>
{
using Type = MemoryBarrier2;
};
using MemoryBarrier2KHR = MemoryBarrier2;
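// Usage sketch (illustrative comment only): a global execution + memory dependency making compute
// writes available to subsequent transfer reads (synchronization2); consumed through the
// DependencyInfo struct defined below.
//
//   auto memoryBarrier = vk::MemoryBarrier2{}
//                          .setSrcStageMask( vk::PipelineStageFlagBits2::eComputeShader )
//                          .setSrcAccessMask( vk::AccessFlagBits2::eShaderWrite )
//                          .setDstStageMask( vk::PipelineStageFlagBits2::eTransfer )
//                          .setDstAccessMask( vk::AccessFlagBits2::eTransferRead );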
// wrapper struct for struct VkImageSubresourceRange, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageSubresourceRange.html
struct ImageSubresourceRange
{
using NativeType = VkImageSubresourceRange;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImageSubresourceRange(VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, uint32_t baseMipLevel_ = {}, uint32_t levelCount_ = {}, uint32_t baseArrayLayer_ = {}, uint32_t layerCount_ = {}) VULKAN_HPP_NOEXCEPT
: aspectMask{ aspectMask_ }, baseMipLevel{ baseMipLevel_ }, levelCount{ levelCount_ }, baseArrayLayer{ baseArrayLayer_ }, layerCount{ layerCount_ }
{}
VULKAN_HPP_CONSTEXPR ImageSubresourceRange( ImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageSubresourceRange( VkImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageSubresourceRange( *reinterpret_cast<ImageSubresourceRange const *>( &rhs ) )
{}
ImageSubresourceRange & operator=( ImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ImageSubresourceRange & operator=( VkImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSubresourceRange const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
{
aspectMask = aspectMask_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & setBaseMipLevel( uint32_t baseMipLevel_ ) VULKAN_HPP_NOEXCEPT
{
baseMipLevel = baseMipLevel_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & setLevelCount( uint32_t levelCount_ ) VULKAN_HPP_NOEXCEPT
{
levelCount = levelCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT
{
baseArrayLayer = baseArrayLayer_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
{
layerCount = layerCount_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkImageSubresourceRange const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageSubresourceRange*>( this );
}
operator VkImageSubresourceRange &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageSubresourceRange*>( this );
}
operator VkImageSubresourceRange const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkImageSubresourceRange*>( this );
}
operator VkImageSubresourceRange *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkImageSubresourceRange*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::ImageAspectFlags const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( aspectMask, baseMipLevel, levelCount, baseArrayLayer, layerCount );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImageSubresourceRange const & ) const = default;
#else
bool operator==( ImageSubresourceRange const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( aspectMask == rhs.aspectMask )
&& ( baseMipLevel == rhs.baseMipLevel )
&& ( levelCount == rhs.levelCount )
&& ( baseArrayLayer == rhs.baseArrayLayer )
&& ( layerCount == rhs.layerCount );
#endif
}
bool operator!=( ImageSubresourceRange const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
uint32_t baseMipLevel = {};
uint32_t levelCount = {};
uint32_t baseArrayLayer = {};
uint32_t layerCount = {};
};
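// Usage sketch (illustrative comment only): a range covering every mip level and array layer of an
// image's color aspect, using the VK_REMAINING_* sentinels instead of explicit counts.
//
//   vk::ImageSubresourceRange fullRange{ vk::ImageAspectFlagBits::eColor,
//                                        0, VK_REMAINING_MIP_LEVELS,
//                                        0, VK_REMAINING_ARRAY_LAYERS };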
// wrapper struct for struct VkImageMemoryBarrier2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageMemoryBarrier2.html
struct ImageMemoryBarrier2
{
using NativeType = VkImageMemoryBarrier2;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageMemoryBarrier2;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImageMemoryBarrier2( VULKAN_HPP_NAMESPACE::PipelineStageFlags2   srcStageMask_        = {},
                                          VULKAN_HPP_NAMESPACE::AccessFlags2          srcAccessMask_       = {},
                                          VULKAN_HPP_NAMESPACE::PipelineStageFlags2   dstStageMask_        = {},
                                          VULKAN_HPP_NAMESPACE::AccessFlags2          dstAccessMask_       = {},
                                          VULKAN_HPP_NAMESPACE::ImageLayout           oldLayout_           = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
                                          VULKAN_HPP_NAMESPACE::ImageLayout           newLayout_           = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
                                          uint32_t                                    srcQueueFamilyIndex_ = {},
                                          uint32_t                                    dstQueueFamilyIndex_ = {},
                                          VULKAN_HPP_NAMESPACE::Image                 image_               = {},
                                          VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_    = {},
                                          const void *                                pNext_               = nullptr ) VULKAN_HPP_NOEXCEPT
  : pNext{ pNext_ }
  , srcStageMask{ srcStageMask_ }
  , srcAccessMask{ srcAccessMask_ }
  , dstStageMask{ dstStageMask_ }
  , dstAccessMask{ dstAccessMask_ }
  , oldLayout{ oldLayout_ }
  , newLayout{ newLayout_ }
  , srcQueueFamilyIndex{ srcQueueFamilyIndex_ }
  , dstQueueFamilyIndex{ dstQueueFamilyIndex_ }
  , image{ image_ }
  , subresourceRange{ subresourceRange_ }
{}
VULKAN_HPP_CONSTEXPR ImageMemoryBarrier2( ImageMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageMemoryBarrier2( VkImageMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageMemoryBarrier2( *reinterpret_cast<ImageMemoryBarrier2 const *>( &rhs ) )
{}
ImageMemoryBarrier2 & operator=( ImageMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ImageMemoryBarrier2 & operator=( VkImageMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2 const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ ) VULKAN_HPP_NOEXCEPT
{
srcStageMask = srcStageMask_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
{
srcAccessMask = srcAccessMask_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ ) VULKAN_HPP_NOEXCEPT
{
dstStageMask = dstStageMask_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
{
dstAccessMask = dstAccessMask_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setOldLayout( VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ ) VULKAN_HPP_NOEXCEPT
{
oldLayout = oldLayout_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setNewLayout( VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ ) VULKAN_HPP_NOEXCEPT
{
newLayout = newLayout_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
{
srcQueueFamilyIndex = srcQueueFamilyIndex_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
{
dstQueueFamilyIndex = dstQueueFamilyIndex_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
{
image = image_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT
{
subresourceRange = subresourceRange_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkImageMemoryBarrier2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageMemoryBarrier2*>( this );
}
operator VkImageMemoryBarrier2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageMemoryBarrier2*>( this );
}
operator VkImageMemoryBarrier2 const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkImageMemoryBarrier2*>( this );
}
operator VkImageMemoryBarrier2 *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkImageMemoryBarrier2*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 const &, VULKAN_HPP_NAMESPACE::AccessFlags2 const &, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 const &, VULKAN_HPP_NAMESPACE::AccessFlags2 const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Image const &, VULKAN_HPP_NAMESPACE::ImageSubresourceRange const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, srcStageMask, srcAccessMask, dstStageMask, dstAccessMask, oldLayout, newLayout, srcQueueFamilyIndex, dstQueueFamilyIndex, image, subresourceRange );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImageMemoryBarrier2 const & ) const = default;
#else
bool operator==( ImageMemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( srcStageMask == rhs.srcStageMask )
&& ( srcAccessMask == rhs.srcAccessMask )
&& ( dstStageMask == rhs.dstStageMask )
&& ( dstAccessMask == rhs.dstAccessMask )
&& ( oldLayout == rhs.oldLayout )
&& ( newLayout == rhs.newLayout )
&& ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex )
&& ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex )
&& ( image == rhs.image )
&& ( subresourceRange == rhs.subresourceRange );
#endif
}
bool operator!=( ImageMemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryBarrier2;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask = {};
VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask = {};
VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask = {};
VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask = {};
VULKAN_HPP_NAMESPACE::ImageLayout oldLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
VULKAN_HPP_NAMESPACE::ImageLayout newLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
uint32_t srcQueueFamilyIndex = {};
uint32_t dstQueueFamilyIndex = {};
VULKAN_HPP_NAMESPACE::Image image = {};
VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {};
};
template <>
struct CppType<StructureType, StructureType::eImageMemoryBarrier2>
{
using Type = ImageMemoryBarrier2;
};
using ImageMemoryBarrier2KHR = ImageMemoryBarrier2;
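// Usage sketch (illustrative comment only): transitioning a hypothetical `image` from eUndefined to
// eTransferDstOptimal before a copy; no queue-family ownership transfer, so both family indices are
// VK_QUEUE_FAMILY_IGNORED. `fullRange` is the ImageSubresourceRange sketched above.
//
//   auto imageBarrier = vk::ImageMemoryBarrier2{}
//                         .setSrcStageMask( vk::PipelineStageFlagBits2::eTopOfPipe )
//                         .setDstStageMask( vk::PipelineStageFlagBits2::eTransfer )
//                         .setDstAccessMask( vk::AccessFlagBits2::eTransferWrite )
//                         .setOldLayout( vk::ImageLayout::eUndefined )
//                         .setNewLayout( vk::ImageLayout::eTransferDstOptimal )
//                         .setSrcQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
//                         .setDstQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
//                         .setImage( image )
//                         .setSubresourceRange( fullRange );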
// wrapper struct for struct VkDependencyInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDependencyInfo.html
struct DependencyInfo
{
using NativeType = VkDependencyInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDependencyInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DependencyInfo( VULKAN_HPP_NAMESPACE::DependencyFlags              dependencyFlags_          = {},
                                     uint32_t                                           memoryBarrierCount_       = {},
                                     const VULKAN_HPP_NAMESPACE::MemoryBarrier2 *       pMemoryBarriers_          = {},
                                     uint32_t                                           bufferMemoryBarrierCount_ = {},
                                     const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2 * pBufferMemoryBarriers_    = {},
                                     uint32_t                                           imageMemoryBarrierCount_  = {},
                                     const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2 *  pImageMemoryBarriers_     = {},
                                     const void *                                       pNext_                    = nullptr ) VULKAN_HPP_NOEXCEPT
  : pNext{ pNext_ }
  , dependencyFlags{ dependencyFlags_ }
  , memoryBarrierCount{ memoryBarrierCount_ }
  , pMemoryBarriers{ pMemoryBarriers_ }
  , bufferMemoryBarrierCount{ bufferMemoryBarrierCount_ }
  , pBufferMemoryBarriers{ pBufferMemoryBarriers_ }
  , imageMemoryBarrierCount{ imageMemoryBarrierCount_ }
  , pImageMemoryBarriers{ pImageMemoryBarriers_ }
{}
VULKAN_HPP_CONSTEXPR DependencyInfo( DependencyInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DependencyInfo( VkDependencyInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: DependencyInfo( *reinterpret_cast<DependencyInfo const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
DependencyInfo( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_,
                VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MemoryBarrier2> const &       memoryBarriers_,
                VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2> const & bufferMemoryBarriers_ = {},
                VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2> const &  imageMemoryBarriers_  = {},
                const void *                                                                                            pNext_                = nullptr )
  : pNext( pNext_ )
  , dependencyFlags( dependencyFlags_ )
  , memoryBarrierCount( static_cast<uint32_t>( memoryBarriers_.size() ) )
  , pMemoryBarriers( memoryBarriers_.data() )
  , bufferMemoryBarrierCount( static_cast<uint32_t>( bufferMemoryBarriers_.size() ) )
  , pBufferMemoryBarriers( bufferMemoryBarriers_.data() )
  , imageMemoryBarrierCount( static_cast<uint32_t>( imageMemoryBarriers_.size() ) )
  , pImageMemoryBarriers( imageMemoryBarriers_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
DependencyInfo & operator=( DependencyInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DependencyInfo & operator=( VkDependencyInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DependencyInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT
{
dependencyFlags = dependencyFlags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setMemoryBarrierCount( uint32_t memoryBarrierCount_ ) VULKAN_HPP_NOEXCEPT
{
memoryBarrierCount = memoryBarrierCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setPMemoryBarriers( const VULKAN_HPP_NAMESPACE::MemoryBarrier2 * pMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT
{
pMemoryBarriers = pMemoryBarriers_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
DependencyInfo & setMemoryBarriers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MemoryBarrier2> const & memoryBarriers_ ) VULKAN_HPP_NOEXCEPT
{
memoryBarrierCount = static_cast<uint32_t>( memoryBarriers_.size() );
pMemoryBarriers = memoryBarriers_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setBufferMemoryBarrierCount( uint32_t bufferMemoryBarrierCount_ ) VULKAN_HPP_NOEXCEPT
{
bufferMemoryBarrierCount = bufferMemoryBarrierCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setPBufferMemoryBarriers( const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2 * pBufferMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT
{
pBufferMemoryBarriers = pBufferMemoryBarriers_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
DependencyInfo & setBufferMemoryBarriers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2> const & bufferMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT
{
bufferMemoryBarrierCount = static_cast<uint32_t>( bufferMemoryBarriers_.size() );
pBufferMemoryBarriers = bufferMemoryBarriers_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setImageMemoryBarrierCount( uint32_t imageMemoryBarrierCount_ ) VULKAN_HPP_NOEXCEPT
{
imageMemoryBarrierCount = imageMemoryBarrierCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setPImageMemoryBarriers( const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2 * pImageMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT
{
pImageMemoryBarriers = pImageMemoryBarriers_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
DependencyInfo & setImageMemoryBarriers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2> const & imageMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT
{
imageMemoryBarrierCount = static_cast<uint32_t>( imageMemoryBarriers_.size() );
pImageMemoryBarriers = imageMemoryBarriers_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDependencyInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDependencyInfo*>( this );
}
operator VkDependencyInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDependencyInfo*>( this );
}
operator VkDependencyInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDependencyInfo*>( this );
}
operator VkDependencyInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDependencyInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DependencyFlags const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::MemoryBarrier2 * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2 * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2 * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, dependencyFlags, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DependencyInfo const & ) const = default;
#else
bool operator==( DependencyInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( dependencyFlags == rhs.dependencyFlags )
&& ( memoryBarrierCount == rhs.memoryBarrierCount )
&& ( pMemoryBarriers == rhs.pMemoryBarriers )
&& ( bufferMemoryBarrierCount == rhs.bufferMemoryBarrierCount )
&& ( pBufferMemoryBarriers == rhs.pBufferMemoryBarriers )
&& ( imageMemoryBarrierCount == rhs.imageMemoryBarrierCount )
&& ( pImageMemoryBarriers == rhs.pImageMemoryBarriers );
#endif
}
bool operator!=( DependencyInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDependencyInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags = {};
uint32_t memoryBarrierCount = {};
const VULKAN_HPP_NAMESPACE::MemoryBarrier2 * pMemoryBarriers = {};
uint32_t bufferMemoryBarrierCount = {};
const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2 * pBufferMemoryBarriers = {};
uint32_t imageMemoryBarrierCount = {};
const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2 * pImageMemoryBarriers = {};
};
template <>
struct CppType<StructureType, StructureType::eDependencyInfo>
{
using Type = DependencyInfo;
};
using DependencyInfoKHR = DependencyInfo;
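// Usage sketch (illustrative only; assumes the default "vk" namespace alias, synchronization2
// support, and valid handles): recording a single image layout transition via the enhanced-mode
// ArrayProxy constructor and vkCmdPipelineBarrier2.
//   vk::ImageMemoryBarrier2 barrier( vk::PipelineStageFlagBits2::eTopOfPipe, {},
//                                    vk::PipelineStageFlagBits2::eFragmentShader, vk::AccessFlagBits2::eShaderRead,
//                                    vk::ImageLayout::eUndefined, vk::ImageLayout::eShaderReadOnlyOptimal,
//                                    queueFamilyIndex, queueFamilyIndex, image, subresourceRange );
//   vk::DependencyInfo dependencyInfo( {}, {}, {}, barrier );  // flags, memory, buffer, image barriers
//   commandBuffer.pipelineBarrier2( dependencyInfo );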
// wrapper struct for struct VkDepthBiasInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDepthBiasInfoEXT.html
struct DepthBiasInfoEXT
{
using NativeType = VkDepthBiasInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDepthBiasInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DepthBiasInfoEXT(float depthBiasConstantFactor_ = {}, float depthBiasClamp_ = {}, float depthBiasSlopeFactor_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, depthBiasConstantFactor{ depthBiasConstantFactor_ }, depthBiasClamp{ depthBiasClamp_ }, depthBiasSlopeFactor{ depthBiasSlopeFactor_ }
{}
VULKAN_HPP_CONSTEXPR DepthBiasInfoEXT( DepthBiasInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DepthBiasInfoEXT( VkDepthBiasInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: DepthBiasInfoEXT( *reinterpret_cast<DepthBiasInfoEXT const *>( &rhs ) )
{}
DepthBiasInfoEXT & operator=( DepthBiasInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DepthBiasInfoEXT & operator=( VkDepthBiasInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DepthBiasInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DepthBiasInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DepthBiasInfoEXT & setDepthBiasConstantFactor( float depthBiasConstantFactor_ ) VULKAN_HPP_NOEXCEPT
{
depthBiasConstantFactor = depthBiasConstantFactor_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DepthBiasInfoEXT & setDepthBiasClamp( float depthBiasClamp_ ) VULKAN_HPP_NOEXCEPT
{
depthBiasClamp = depthBiasClamp_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DepthBiasInfoEXT & setDepthBiasSlopeFactor( float depthBiasSlopeFactor_ ) VULKAN_HPP_NOEXCEPT
{
depthBiasSlopeFactor = depthBiasSlopeFactor_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDepthBiasInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDepthBiasInfoEXT*>( this );
}
operator VkDepthBiasInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDepthBiasInfoEXT*>( this );
}
operator VkDepthBiasInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDepthBiasInfoEXT*>( this );
}
operator VkDepthBiasInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDepthBiasInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, float const &, float const &, float const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DepthBiasInfoEXT const & ) const = default;
#else
bool operator==( DepthBiasInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( depthBiasConstantFactor == rhs.depthBiasConstantFactor )
&& ( depthBiasClamp == rhs.depthBiasClamp )
&& ( depthBiasSlopeFactor == rhs.depthBiasSlopeFactor );
#endif
}
bool operator!=( DepthBiasInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDepthBiasInfoEXT;
const void * pNext = {};
float depthBiasConstantFactor = {};
float depthBiasClamp = {};
float depthBiasSlopeFactor = {};
};
template <>
struct CppType<StructureType, StructureType::eDepthBiasInfoEXT>
{
using Type = DepthBiasInfoEXT;
};
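// Usage sketch (illustrative; assumes VK_EXT_depth_bias_control is enabled): the extensible
// replacement for vkCmdSetDepthBias.
//   vk::DepthBiasInfoEXT depthBiasInfo( 1.25f /*constant*/, 0.0f /*clamp*/, 1.75f /*slope*/ );
//   commandBuffer.setDepthBias2EXT( depthBiasInfo );  // vkCmdSetDepthBias2EXT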
// wrapper struct for struct VkDepthBiasRepresentationInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDepthBiasRepresentationInfoEXT.html
struct DepthBiasRepresentationInfoEXT
{
using NativeType = VkDepthBiasRepresentationInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDepthBiasRepresentationInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DepthBiasRepresentationInfoEXT(VULKAN_HPP_NAMESPACE::DepthBiasRepresentationEXT depthBiasRepresentation_ = VULKAN_HPP_NAMESPACE::DepthBiasRepresentationEXT::eLeastRepresentableValueFormat, VULKAN_HPP_NAMESPACE::Bool32 depthBiasExact_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, depthBiasRepresentation{ depthBiasRepresentation_ }, depthBiasExact{ depthBiasExact_ }
{}
VULKAN_HPP_CONSTEXPR DepthBiasRepresentationInfoEXT( DepthBiasRepresentationInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DepthBiasRepresentationInfoEXT( VkDepthBiasRepresentationInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: DepthBiasRepresentationInfoEXT( *reinterpret_cast<DepthBiasRepresentationInfoEXT const *>( &rhs ) )
{}
DepthBiasRepresentationInfoEXT & operator=( DepthBiasRepresentationInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DepthBiasRepresentationInfoEXT & operator=( VkDepthBiasRepresentationInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DepthBiasRepresentationInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DepthBiasRepresentationInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DepthBiasRepresentationInfoEXT & setDepthBiasRepresentation( VULKAN_HPP_NAMESPACE::DepthBiasRepresentationEXT depthBiasRepresentation_ ) VULKAN_HPP_NOEXCEPT
{
depthBiasRepresentation = depthBiasRepresentation_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DepthBiasRepresentationInfoEXT & setDepthBiasExact( VULKAN_HPP_NAMESPACE::Bool32 depthBiasExact_ ) VULKAN_HPP_NOEXCEPT
{
depthBiasExact = depthBiasExact_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDepthBiasRepresentationInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDepthBiasRepresentationInfoEXT*>( this );
}
operator VkDepthBiasRepresentationInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDepthBiasRepresentationInfoEXT*>( this );
}
operator VkDepthBiasRepresentationInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDepthBiasRepresentationInfoEXT*>( this );
}
operator VkDepthBiasRepresentationInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDepthBiasRepresentationInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DepthBiasRepresentationEXT const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, depthBiasRepresentation, depthBiasExact );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DepthBiasRepresentationInfoEXT const & ) const = default;
#else
bool operator==( DepthBiasRepresentationInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( depthBiasRepresentation == rhs.depthBiasRepresentation )
&& ( depthBiasExact == rhs.depthBiasExact );
#endif
}
bool operator!=( DepthBiasRepresentationInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDepthBiasRepresentationInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DepthBiasRepresentationEXT depthBiasRepresentation = VULKAN_HPP_NAMESPACE::DepthBiasRepresentationEXT::eLeastRepresentableValueFormat;
VULKAN_HPP_NAMESPACE::Bool32 depthBiasExact = {};
};
template <>
struct CppType<StructureType, StructureType::eDepthBiasRepresentationInfoEXT>
{
using Type = DepthBiasRepresentationInfoEXT;
};
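// Usage sketch (illustrative; VK_EXT_depth_bias_control): requesting a float depth-bias
// representation by chaining this struct into DepthBiasInfoEXT::pNext.
//   vk::DepthBiasRepresentationInfoEXT representation( vk::DepthBiasRepresentationEXT::eFloat );
//   vk::DepthBiasInfoEXT depthBiasInfo( 1.25f, 0.0f, 1.75f, &representation );
//   commandBuffer.setDepthBias2EXT( depthBiasInfo );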
// wrapper struct for struct VkDepthClampRangeEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDepthClampRangeEXT.html
struct DepthClampRangeEXT
{
using NativeType = VkDepthClampRangeEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DepthClampRangeEXT(float minDepthClamp_ = {}, float maxDepthClamp_ = {}) VULKAN_HPP_NOEXCEPT
: minDepthClamp{ minDepthClamp_ }, maxDepthClamp{ maxDepthClamp_ }
{}
VULKAN_HPP_CONSTEXPR DepthClampRangeEXT( DepthClampRangeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DepthClampRangeEXT( VkDepthClampRangeEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: DepthClampRangeEXT( *reinterpret_cast<DepthClampRangeEXT const *>( &rhs ) )
{}
DepthClampRangeEXT & operator=( DepthClampRangeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DepthClampRangeEXT & operator=( VkDepthClampRangeEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DepthClampRangeEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DepthClampRangeEXT & setMinDepthClamp( float minDepthClamp_ ) VULKAN_HPP_NOEXCEPT
{
minDepthClamp = minDepthClamp_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DepthClampRangeEXT & setMaxDepthClamp( float maxDepthClamp_ ) VULKAN_HPP_NOEXCEPT
{
maxDepthClamp = maxDepthClamp_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDepthClampRangeEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDepthClampRangeEXT*>( this );
}
operator VkDepthClampRangeEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDepthClampRangeEXT*>( this );
}
operator VkDepthClampRangeEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDepthClampRangeEXT*>( this );
}
operator VkDepthClampRangeEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDepthClampRangeEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<float const &, float const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( minDepthClamp, maxDepthClamp );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DepthClampRangeEXT const & ) const = default;
#else
bool operator==( DepthClampRangeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( minDepthClamp == rhs.minDepthClamp )
&& ( maxDepthClamp == rhs.maxDepthClamp );
#endif
}
bool operator!=( DepthClampRangeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
float minDepthClamp = {};
float maxDepthClamp = {};
};
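// Usage sketch (illustrative; assumes VK_EXT_depth_clamp_control and its user-defined-range
// mode): minDepthClamp must not exceed maxDepthClamp.
//   vk::DepthClampRangeEXT depthClampRange( 0.0f, 0.5f );
//   commandBuffer.setDepthClampRangeEXT( vk::DepthClampModeEXT::eUserDefinedRange, &depthClampRange );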
// wrapper struct for struct VkDescriptorAddressInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorAddressInfoEXT.html
struct DescriptorAddressInfoEXT
{
using NativeType = VkDescriptorAddressInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorAddressInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DescriptorAddressInfoEXT(VULKAN_HPP_NAMESPACE::DeviceAddress address_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize range_ = {}, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, address{ address_ }, range{ range_ }, format{ format_ }
{}
VULKAN_HPP_CONSTEXPR DescriptorAddressInfoEXT( DescriptorAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DescriptorAddressInfoEXT( VkDescriptorAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: DescriptorAddressInfoEXT( *reinterpret_cast<DescriptorAddressInfoEXT const *>( &rhs ) )
{}
DescriptorAddressInfoEXT & operator=( DescriptorAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DescriptorAddressInfoEXT & operator=( VkDescriptorAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DescriptorAddressInfoEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorAddressInfoEXT & setAddress( VULKAN_HPP_NAMESPACE::DeviceAddress address_ ) VULKAN_HPP_NOEXCEPT
{
address = address_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorAddressInfoEXT & setRange( VULKAN_HPP_NAMESPACE::DeviceSize range_ ) VULKAN_HPP_NOEXCEPT
{
range = range_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorAddressInfoEXT & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
{
format = format_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDescriptorAddressInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDescriptorAddressInfoEXT*>( this );
}
operator VkDescriptorAddressInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDescriptorAddressInfoEXT*>( this );
}
operator VkDescriptorAddressInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDescriptorAddressInfoEXT*>( this );
}
operator VkDescriptorAddressInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDescriptorAddressInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::Format const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, address, range, format );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DescriptorAddressInfoEXT const & ) const = default;
#else
bool operator==( DescriptorAddressInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( address == rhs.address )
&& ( range == rhs.range )
&& ( format == rhs.format );
#endif
}
bool operator!=( DescriptorAddressInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorAddressInfoEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::DeviceAddress address = {};
VULKAN_HPP_NAMESPACE::DeviceSize range = {};
VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
};
template <>
struct CppType<StructureType, StructureType::eDescriptorAddressInfoEXT>
{
using Type = DescriptorAddressInfoEXT;
};
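// Usage sketch (illustrative; VK_EXT_descriptor_buffer; the address, range, size, and
// destination below are hypothetical): describing a buffer by device address so that
// vkGetDescriptorEXT can encode its descriptor.
//   vk::DescriptorAddressInfoEXT addressInfo( bufferDeviceAddress, 256 /*range in bytes*/ );
//   vk::DescriptorGetInfoEXT getInfo;
//   getInfo.setType( vk::DescriptorType::eUniformBuffer );
//   getInfo.data.pUniformBuffer = &addressInfo;
//   device.getDescriptorEXT( getInfo, uniformBufferDescriptorSize, destination );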
// wrapper struct for struct VkDescriptorBufferBindingInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorBufferBindingInfoEXT.html
struct DescriptorBufferBindingInfoEXT
{
using NativeType = VkDescriptorBufferBindingInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorBufferBindingInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DescriptorBufferBindingInfoEXT(VULKAN_HPP_NAMESPACE::DeviceAddress address_ = {}, VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, address{ address_ }, usage{ usage_ }
{}
VULKAN_HPP_CONSTEXPR DescriptorBufferBindingInfoEXT( DescriptorBufferBindingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DescriptorBufferBindingInfoEXT( VkDescriptorBufferBindingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: DescriptorBufferBindingInfoEXT( *reinterpret_cast<DescriptorBufferBindingInfoEXT const *>( &rhs ) )
{}
DescriptorBufferBindingInfoEXT & operator=( DescriptorBufferBindingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DescriptorBufferBindingInfoEXT & operator=( VkDescriptorBufferBindingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DescriptorBufferBindingInfoEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorBufferBindingInfoEXT & setAddress( VULKAN_HPP_NAMESPACE::DeviceAddress address_ ) VULKAN_HPP_NOEXCEPT
{
address = address_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorBufferBindingInfoEXT & setUsage( VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
{
usage = usage_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDescriptorBufferBindingInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDescriptorBufferBindingInfoEXT*>( this );
}
operator VkDescriptorBufferBindingInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDescriptorBufferBindingInfoEXT*>( this );
}
operator VkDescriptorBufferBindingInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDescriptorBufferBindingInfoEXT*>( this );
}
operator VkDescriptorBufferBindingInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDescriptorBufferBindingInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::BufferUsageFlags const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, address, usage );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DescriptorBufferBindingInfoEXT const & ) const = default;
#else
bool operator==( DescriptorBufferBindingInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( address == rhs.address )
&& ( usage == rhs.usage );
#endif
}
bool operator!=( DescriptorBufferBindingInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorBufferBindingInfoEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::DeviceAddress address = {};
VULKAN_HPP_NAMESPACE::BufferUsageFlags usage = {};
};
template <>
struct CppType<StructureType, StructureType::eDescriptorBufferBindingInfoEXT>
{
using Type = DescriptorBufferBindingInfoEXT;
};
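// Usage sketch (illustrative; VK_EXT_descriptor_buffer; the address must come from a buffer
// created with descriptor-buffer usage):
//   vk::DescriptorBufferBindingInfoEXT bindingInfo( descriptorBufferAddress,
//                                                   vk::BufferUsageFlagBits::eResourceDescriptorBufferEXT );
//   commandBuffer.bindDescriptorBuffersEXT( bindingInfo );  // vkCmdBindDescriptorBuffersEXT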
// wrapper struct for struct VkDescriptorBufferBindingPushDescriptorBufferHandleEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorBufferBindingPushDescriptorBufferHandleEXT.html
struct DescriptorBufferBindingPushDescriptorBufferHandleEXT
{
using NativeType = VkDescriptorBufferBindingPushDescriptorBufferHandleEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorBufferBindingPushDescriptorBufferHandleEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DescriptorBufferBindingPushDescriptorBufferHandleEXT(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, buffer{ buffer_ }
{}
VULKAN_HPP_CONSTEXPR DescriptorBufferBindingPushDescriptorBufferHandleEXT( DescriptorBufferBindingPushDescriptorBufferHandleEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DescriptorBufferBindingPushDescriptorBufferHandleEXT( VkDescriptorBufferBindingPushDescriptorBufferHandleEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: DescriptorBufferBindingPushDescriptorBufferHandleEXT( *reinterpret_cast<DescriptorBufferBindingPushDescriptorBufferHandleEXT const *>( &rhs ) )
{}
DescriptorBufferBindingPushDescriptorBufferHandleEXT & operator=( DescriptorBufferBindingPushDescriptorBufferHandleEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DescriptorBufferBindingPushDescriptorBufferHandleEXT & operator=( VkDescriptorBufferBindingPushDescriptorBufferHandleEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorBufferBindingPushDescriptorBufferHandleEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DescriptorBufferBindingPushDescriptorBufferHandleEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorBufferBindingPushDescriptorBufferHandleEXT & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
{
buffer = buffer_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDescriptorBufferBindingPushDescriptorBufferHandleEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDescriptorBufferBindingPushDescriptorBufferHandleEXT*>( this );
}
operator VkDescriptorBufferBindingPushDescriptorBufferHandleEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDescriptorBufferBindingPushDescriptorBufferHandleEXT*>( this );
}
operator VkDescriptorBufferBindingPushDescriptorBufferHandleEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDescriptorBufferBindingPushDescriptorBufferHandleEXT*>( this );
}
operator VkDescriptorBufferBindingPushDescriptorBufferHandleEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDescriptorBufferBindingPushDescriptorBufferHandleEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Buffer const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, buffer );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DescriptorBufferBindingPushDescriptorBufferHandleEXT const & ) const = default;
#else
bool operator==( DescriptorBufferBindingPushDescriptorBufferHandleEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( buffer == rhs.buffer );
#endif
}
bool operator!=( DescriptorBufferBindingPushDescriptorBufferHandleEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorBufferBindingPushDescriptorBufferHandleEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Buffer buffer = {};
};
template <>
struct CppType<StructureType, StructureType::eDescriptorBufferBindingPushDescriptorBufferHandleEXT>
{
using Type = DescriptorBufferBindingPushDescriptorBufferHandleEXT;
};
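// Usage sketch (illustrative; only required when the implementation reports
// bufferlessPushDescriptors == VK_FALSE): supplying the buffer backing push descriptors by
// chaining into DescriptorBufferBindingInfoEXT::pNext.
//   vk::DescriptorBufferBindingPushDescriptorBufferHandleEXT pushHandle( pushDescriptorBuffer );
//   vk::DescriptorBufferBindingInfoEXT bindingInfo( address, usage, &pushHandle );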
// wrapper struct for struct VkDescriptorBufferInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorBufferInfo.html
struct DescriptorBufferInfo
{
using NativeType = VkDescriptorBufferInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DescriptorBufferInfo(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize range_ = {}) VULKAN_HPP_NOEXCEPT
: buffer{ buffer_ }, offset{ offset_ }, range{ range_ }
{}
VULKAN_HPP_CONSTEXPR DescriptorBufferInfo( DescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DescriptorBufferInfo( VkDescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: DescriptorBufferInfo( *reinterpret_cast<DescriptorBufferInfo const *>( &rhs ) )
{}
DescriptorBufferInfo & operator=( DescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DescriptorBufferInfo & operator=( VkDescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorBufferInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DescriptorBufferInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
{
buffer = buffer_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorBufferInfo & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
{
offset = offset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorBufferInfo & setRange( VULKAN_HPP_NAMESPACE::DeviceSize range_ ) VULKAN_HPP_NOEXCEPT
{
range = range_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDescriptorBufferInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDescriptorBufferInfo*>( this );
}
operator VkDescriptorBufferInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDescriptorBufferInfo*>( this );
}
operator VkDescriptorBufferInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDescriptorBufferInfo*>( this );
}
operator VkDescriptorBufferInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDescriptorBufferInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::Buffer const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( buffer, offset, range );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DescriptorBufferInfo const & ) const = default;
#else
bool operator==( DescriptorBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( buffer == rhs.buffer )
&& ( offset == rhs.offset )
&& ( range == rhs.range );
#endif
}
bool operator!=( DescriptorBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::Buffer buffer = {};
VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
VULKAN_HPP_NAMESPACE::DeviceSize range = {};
};
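// Usage sketch (illustrative; handles are hypothetical): pointing a uniform-buffer descriptor
// write at a whole buffer.
//   vk::DescriptorBufferInfo bufferInfo( uniformBuffer, 0, vk::WholeSize );
//   vk::WriteDescriptorSet write( descriptorSet, 0 /*binding*/, 0 /*arrayElement*/, 1,
//                                 vk::DescriptorType::eUniformBuffer, nullptr, &bufferInfo );
//   device.updateDescriptorSets( write, {} );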
// wrapper struct for struct VkDescriptorImageInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorImageInfo.html
struct DescriptorImageInfo
{
using NativeType = VkDescriptorImageInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DescriptorImageInfo(VULKAN_HPP_NAMESPACE::Sampler sampler_ = {}, VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined) VULKAN_HPP_NOEXCEPT
: sampler{ sampler_ }, imageView{ imageView_ }, imageLayout{ imageLayout_ }
{}
VULKAN_HPP_CONSTEXPR DescriptorImageInfo( DescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DescriptorImageInfo( VkDescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: DescriptorImageInfo( *reinterpret_cast<DescriptorImageInfo const *>( &rhs ) )
{}
DescriptorImageInfo & operator=( DescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DescriptorImageInfo & operator=( VkDescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorImageInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DescriptorImageInfo & setSampler( VULKAN_HPP_NAMESPACE::Sampler sampler_ ) VULKAN_HPP_NOEXCEPT
{
sampler = sampler_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorImageInfo & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
{
imageView = imageView_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorImageInfo & setImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ ) VULKAN_HPP_NOEXCEPT
{
imageLayout = imageLayout_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDescriptorImageInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDescriptorImageInfo*>( this );
}
operator VkDescriptorImageInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDescriptorImageInfo*>( this );
}
operator VkDescriptorImageInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDescriptorImageInfo*>( this );
}
operator VkDescriptorImageInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDescriptorImageInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::Sampler const &, VULKAN_HPP_NAMESPACE::ImageView const &, VULKAN_HPP_NAMESPACE::ImageLayout const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sampler, imageView, imageLayout );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DescriptorImageInfo const & ) const = default;
#else
bool operator==( DescriptorImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sampler == rhs.sampler )
&& ( imageView == rhs.imageView )
&& ( imageLayout == rhs.imageLayout );
#endif
}
bool operator!=( DescriptorImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::Sampler sampler = {};
VULKAN_HPP_NAMESPACE::ImageView imageView = {};
VULKAN_HPP_NAMESPACE::ImageLayout imageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
};
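// Usage sketch (illustrative): the combined-image-sampler counterpart to the buffer write above.
//   vk::DescriptorImageInfo imageInfo( sampler, imageView, vk::ImageLayout::eShaderReadOnlyOptimal );
//   vk::WriteDescriptorSet write( descriptorSet, 1 /*binding*/, 0, 1,
//                                 vk::DescriptorType::eCombinedImageSampler, &imageInfo );
//   device.updateDescriptorSets( write, {} );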
// wrapper union for union VkDescriptorDataEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorDataEXT.html
union DescriptorDataEXT
{
using NativeType = VkDescriptorDataEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT( const VULKAN_HPP_NAMESPACE::Sampler * pSampler_ = {} )
: pSampler( pSampler_ )
{}
VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT( const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pDescriptorImageInfo_ )
: pCombinedImageSampler( pDescriptorImageInfo_ )
{}
VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT( const VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT * pDescriptorAddressInfoEXT_ )
: pUniformTexelBuffer( pDescriptorAddressInfoEXT_ )
{}
VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT( VULKAN_HPP_NAMESPACE::DeviceAddress accelerationStructure_ )
: accelerationStructure( accelerationStructure_ )
{}
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & setPSampler( const VULKAN_HPP_NAMESPACE::Sampler * pSampler_ ) VULKAN_HPP_NOEXCEPT
{
pSampler = pSampler_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & setPCombinedImageSampler( const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pCombinedImageSampler_ ) VULKAN_HPP_NOEXCEPT
{
pCombinedImageSampler = pCombinedImageSampler_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & setPInputAttachmentImage( const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pInputAttachmentImage_ ) VULKAN_HPP_NOEXCEPT
{
pInputAttachmentImage = pInputAttachmentImage_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & setPSampledImage( const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pSampledImage_ ) VULKAN_HPP_NOEXCEPT
{
pSampledImage = pSampledImage_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & setPStorageImage( const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pStorageImage_ ) VULKAN_HPP_NOEXCEPT
{
pStorageImage = pStorageImage_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & setPUniformTexelBuffer( const VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT * pUniformTexelBuffer_ ) VULKAN_HPP_NOEXCEPT
{
pUniformTexelBuffer = pUniformTexelBuffer_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & setPStorageTexelBuffer( const VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT * pStorageTexelBuffer_ ) VULKAN_HPP_NOEXCEPT
{
pStorageTexelBuffer = pStorageTexelBuffer_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & setPUniformBuffer( const VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT * pUniformBuffer_ ) VULKAN_HPP_NOEXCEPT
{
pUniformBuffer = pUniformBuffer_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & setPStorageBuffer( const VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT * pStorageBuffer_ ) VULKAN_HPP_NOEXCEPT
{
pStorageBuffer = pStorageBuffer_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & setAccelerationStructure( VULKAN_HPP_NAMESPACE::DeviceAddress accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
{
accelerationStructure = accelerationStructure_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDescriptorDataEXT const &() const
{
return *reinterpret_cast<const VkDescriptorDataEXT*>( this );
}
operator VkDescriptorDataEXT &()
{
return *reinterpret_cast<VkDescriptorDataEXT*>( this );
}
#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
const VULKAN_HPP_NAMESPACE::Sampler * pSampler;
const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pCombinedImageSampler;
const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pInputAttachmentImage;
const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pSampledImage;
const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pStorageImage;
const VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT * pUniformTexelBuffer;
const VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT * pStorageTexelBuffer;
const VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT * pUniformBuffer;
const VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT * pStorageBuffer;
VULKAN_HPP_NAMESPACE::DeviceAddress accelerationStructure;
#else
const VkSampler * pSampler;
const VkDescriptorImageInfo * pCombinedImageSampler;
const VkDescriptorImageInfo * pInputAttachmentImage;
const VkDescriptorImageInfo * pSampledImage;
const VkDescriptorImageInfo * pStorageImage;
const VkDescriptorAddressInfoEXT * pUniformTexelBuffer;
const VkDescriptorAddressInfoEXT * pStorageTexelBuffer;
const VkDescriptorAddressInfoEXT * pUniformBuffer;
const VkDescriptorAddressInfoEXT * pStorageBuffer;
VkDeviceAddress accelerationStructure;
#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
};
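// Usage sketch (illustrative): the active union member must correspond to the descriptor type
// later passed to vkGetDescriptorEXT; here a storage-buffer address info is selected.
//   vk::DescriptorAddressInfoEXT addressInfo( storageBufferAddress, rangeInBytes );
//   vk::DescriptorDataEXT data;
//   data.pStorageBuffer = &addressInfo;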
// wrapper struct for struct VkDescriptorGetInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorGetInfoEXT.html
struct DescriptorGetInfoEXT
{
using NativeType = VkDescriptorGetInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorGetInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 DescriptorGetInfoEXT(VULKAN_HPP_NAMESPACE::DescriptorType type_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, VULKAN_HPP_NAMESPACE::DescriptorDataEXT data_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, type{ type_ }, data{ data_ }
{}
VULKAN_HPP_CONSTEXPR_14 DescriptorGetInfoEXT( DescriptorGetInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DescriptorGetInfoEXT( VkDescriptorGetInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: DescriptorGetInfoEXT( *reinterpret_cast<DescriptorGetInfoEXT const *>( &rhs ) )
{}
DescriptorGetInfoEXT & operator=( DescriptorGetInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DescriptorGetInfoEXT & operator=( VkDescriptorGetInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DescriptorGetInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorGetInfoEXT & setType( VULKAN_HPP_NAMESPACE::DescriptorType type_ ) VULKAN_HPP_NOEXCEPT
{
type = type_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorGetInfoEXT & setData( VULKAN_HPP_NAMESPACE::DescriptorDataEXT const & data_ ) VULKAN_HPP_NOEXCEPT
{
data = data_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDescriptorGetInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDescriptorGetInfoEXT*>( this );
}
operator VkDescriptorGetInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDescriptorGetInfoEXT*>( this );
}
operator VkDescriptorGetInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDescriptorGetInfoEXT*>( this );
}
operator VkDescriptorGetInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDescriptorGetInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DescriptorType const &, VULKAN_HPP_NAMESPACE::DescriptorDataEXT const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, type, data );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorGetInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DescriptorType type = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
VULKAN_HPP_NAMESPACE::DescriptorDataEXT data = {};
};
template <>
struct CppType<StructureType, StructureType::eDescriptorGetInfoEXT>
{
using Type = DescriptorGetInfoEXT;
};
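// Note that DescriptorGetInfoEXT carries no comparison operators: its DescriptorDataEXT member
// is a union, which cannot be compared member-wise. Usage sketch (illustrative; continues the
// DescriptorDataEXT example above; the descriptor size comes from
// PhysicalDeviceDescriptorBufferPropertiesEXT):
//   vk::DescriptorGetInfoEXT getInfo( vk::DescriptorType::eStorageBuffer, data );
//   device.getDescriptorEXT( getInfo, storageBufferDescriptorSize, destination );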
// wrapper struct for struct VkDescriptorGetTensorInfoARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorGetTensorInfoARM.html
struct DescriptorGetTensorInfoARM
{
using NativeType = VkDescriptorGetTensorInfoARM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorGetTensorInfoARM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DescriptorGetTensorInfoARM(VULKAN_HPP_NAMESPACE::TensorViewARM tensorView_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, tensorView{ tensorView_ }
{}
VULKAN_HPP_CONSTEXPR DescriptorGetTensorInfoARM( DescriptorGetTensorInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DescriptorGetTensorInfoARM( VkDescriptorGetTensorInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
: DescriptorGetTensorInfoARM( *reinterpret_cast<DescriptorGetTensorInfoARM const *>( &rhs ) )
{}
DescriptorGetTensorInfoARM & operator=( DescriptorGetTensorInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DescriptorGetTensorInfoARM & operator=( VkDescriptorGetTensorInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorGetTensorInfoARM const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DescriptorGetTensorInfoARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorGetTensorInfoARM & setTensorView( VULKAN_HPP_NAMESPACE::TensorViewARM tensorView_ ) VULKAN_HPP_NOEXCEPT
{
tensorView = tensorView_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDescriptorGetTensorInfoARM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDescriptorGetTensorInfoARM*>( this );
}
operator VkDescriptorGetTensorInfoARM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDescriptorGetTensorInfoARM*>( this );
}
operator VkDescriptorGetTensorInfoARM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDescriptorGetTensorInfoARM*>( this );
}
operator VkDescriptorGetTensorInfoARM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDescriptorGetTensorInfoARM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::TensorViewARM const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, tensorView );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DescriptorGetTensorInfoARM const & ) const = default;
#else
bool operator==( DescriptorGetTensorInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( tensorView == rhs.tensorView );
#endif
}
bool operator!=( DescriptorGetTensorInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorGetTensorInfoARM;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::TensorViewARM tensorView = {};
};
template <>
struct CppType<StructureType, StructureType::eDescriptorGetTensorInfoARM>
{
using Type = DescriptorGetTensorInfoARM;
};
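// Usage sketch (illustrative; VK_ARM_tensors; the chaining shown here is an assumption based on
// the extension's tensor descriptor type): supplying the tensor view when fetching a tensor
// descriptor.
//   vk::DescriptorGetTensorInfoARM tensorInfo( tensorView );
//   vk::DescriptorGetInfoEXT getInfo( vk::DescriptorType::eTensorARM, {}, &tensorInfo );
//   device.getDescriptorEXT( getInfo, tensorDescriptorSize, destination );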
// wrapper struct for struct VkDescriptorPoolSize, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorPoolSize.html
struct DescriptorPoolSize
{
using NativeType = VkDescriptorPoolSize;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DescriptorPoolSize(VULKAN_HPP_NAMESPACE::DescriptorType type_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, uint32_t descriptorCount_ = {}) VULKAN_HPP_NOEXCEPT
: type{ type_ }, descriptorCount{ descriptorCount_ }
{}
VULKAN_HPP_CONSTEXPR DescriptorPoolSize( DescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DescriptorPoolSize( VkDescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT
: DescriptorPoolSize( *reinterpret_cast<DescriptorPoolSize const *>( &rhs ) )
{}
DescriptorPoolSize & operator=( DescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DescriptorPoolSize & operator=( VkDescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorPoolSize const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DescriptorPoolSize & setType( VULKAN_HPP_NAMESPACE::DescriptorType type_ ) VULKAN_HPP_NOEXCEPT
{
type = type_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorPoolSize & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
{
descriptorCount = descriptorCount_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDescriptorPoolSize const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDescriptorPoolSize*>( this );
}
operator VkDescriptorPoolSize &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDescriptorPoolSize*>( this );
}
operator VkDescriptorPoolSize const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDescriptorPoolSize*>( this );
}
operator VkDescriptorPoolSize *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDescriptorPoolSize*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::DescriptorType const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( type, descriptorCount );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DescriptorPoolSize const & ) const = default;
#else
bool operator==( DescriptorPoolSize const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( type == rhs.type )
&& ( descriptorCount == rhs.descriptorCount );
#endif
}
bool operator!=( DescriptorPoolSize const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::DescriptorType type = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
uint32_t descriptorCount = {};
};
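// Usage sketch (illustrative): sizing a pool for a mix of descriptor types; consumed by the
// DescriptorPoolCreateInfo example below.
//   std::array<vk::DescriptorPoolSize, 2> poolSizes = { {
//     { vk::DescriptorType::eUniformBuffer, 16 },
//     { vk::DescriptorType::eCombinedImageSampler, 32 }
//   } };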
// wrapper struct for struct VkDescriptorPoolCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorPoolCreateInfo.html
struct DescriptorPoolCreateInfo
{
using NativeType = VkDescriptorPoolCreateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorPoolCreateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DescriptorPoolCreateInfo(VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_ = {}, uint32_t maxSets_ = {}, uint32_t poolSizeCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorPoolSize * pPoolSizes_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, maxSets{ maxSets_ }, poolSizeCount{ poolSizeCount_ }, pPoolSizes{ pPoolSizes_ }
{}
VULKAN_HPP_CONSTEXPR DescriptorPoolCreateInfo( DescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DescriptorPoolCreateInfo( VkDescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: DescriptorPoolCreateInfo( *reinterpret_cast<DescriptorPoolCreateInfo const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
DescriptorPoolCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_, uint32_t maxSets_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorPoolSize> const & poolSizes_, const void * pNext_ = nullptr )
: pNext( pNext_ ), flags( flags_ ), maxSets( maxSets_ ), poolSizeCount( static_cast<uint32_t>( poolSizes_.size() ) ), pPoolSizes( poolSizes_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
DescriptorPoolCreateInfo & operator=( DescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DescriptorPoolCreateInfo & operator=( VkDescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo & setMaxSets( uint32_t maxSets_ ) VULKAN_HPP_NOEXCEPT
{
maxSets = maxSets_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo & setPoolSizeCount( uint32_t poolSizeCount_ ) VULKAN_HPP_NOEXCEPT
{
poolSizeCount = poolSizeCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo & setPPoolSizes( const VULKAN_HPP_NAMESPACE::DescriptorPoolSize * pPoolSizes_ ) VULKAN_HPP_NOEXCEPT
{
pPoolSizes = pPoolSizes_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
DescriptorPoolCreateInfo & setPoolSizes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorPoolSize> const & poolSizes_ ) VULKAN_HPP_NOEXCEPT
{
poolSizeCount = static_cast<uint32_t>( poolSizes_.size() );
pPoolSizes = poolSizes_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDescriptorPoolCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDescriptorPoolCreateInfo*>( this );
}
operator VkDescriptorPoolCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDescriptorPoolCreateInfo*>( this );
}
operator VkDescriptorPoolCreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDescriptorPoolCreateInfo*>( this );
}
operator VkDescriptorPoolCreateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDescriptorPoolCreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags const &, uint32_t const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::DescriptorPoolSize * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, maxSets, poolSizeCount, pPoolSizes );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DescriptorPoolCreateInfo const & ) const = default;
#else
bool operator==( DescriptorPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( maxSets == rhs.maxSets )
&& ( poolSizeCount == rhs.poolSizeCount )
&& ( pPoolSizes == rhs.pPoolSizes );
#endif
}
bool operator!=( DescriptorPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorPoolCreateInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags = {};
uint32_t maxSets = {};
uint32_t poolSizeCount = {};
const VULKAN_HPP_NAMESPACE::DescriptorPoolSize * pPoolSizes = {};
};
template <>
struct CppType<StructureType, StructureType::eDescriptorPoolCreateInfo>
{
using Type = DescriptorPoolCreateInfo;
};
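// Usage sketch (illustrative; continues the poolSizes example above): the enhanced-mode
// ArrayProxy constructor derives poolSizeCount / pPoolSizes from the container.
//   vk::DescriptorPoolCreateInfo poolInfo( vk::DescriptorPoolCreateFlagBits::eFreeDescriptorSet,
//                                          8 /*maxSets*/, poolSizes );
//   vk::DescriptorPool descriptorPool = device.createDescriptorPool( poolInfo );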
// wrapper struct for struct VkDescriptorPoolInlineUniformBlockCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorPoolInlineUniformBlockCreateInfo.html
struct DescriptorPoolInlineUniformBlockCreateInfo
{
using NativeType = VkDescriptorPoolInlineUniformBlockCreateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorPoolInlineUniformBlockCreateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DescriptorPoolInlineUniformBlockCreateInfo(uint32_t maxInlineUniformBlockBindings_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, maxInlineUniformBlockBindings{ maxInlineUniformBlockBindings_ }
{}
VULKAN_HPP_CONSTEXPR DescriptorPoolInlineUniformBlockCreateInfo( DescriptorPoolInlineUniformBlockCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DescriptorPoolInlineUniformBlockCreateInfo( VkDescriptorPoolInlineUniformBlockCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: DescriptorPoolInlineUniformBlockCreateInfo( *reinterpret_cast<DescriptorPoolInlineUniformBlockCreateInfo const *>( &rhs ) )
{}
DescriptorPoolInlineUniformBlockCreateInfo & operator=( DescriptorPoolInlineUniformBlockCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DescriptorPoolInlineUniformBlockCreateInfo & operator=( VkDescriptorPoolInlineUniformBlockCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DescriptorPoolInlineUniformBlockCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorPoolInlineUniformBlockCreateInfo & setMaxInlineUniformBlockBindings( uint32_t maxInlineUniformBlockBindings_ ) VULKAN_HPP_NOEXCEPT
{
maxInlineUniformBlockBindings = maxInlineUniformBlockBindings_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDescriptorPoolInlineUniformBlockCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDescriptorPoolInlineUniformBlockCreateInfo*>( this );
}
operator VkDescriptorPoolInlineUniformBlockCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDescriptorPoolInlineUniformBlockCreateInfo*>( this );
}
operator VkDescriptorPoolInlineUniformBlockCreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDescriptorPoolInlineUniformBlockCreateInfo*>( this );
}
operator VkDescriptorPoolInlineUniformBlockCreateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDescriptorPoolInlineUniformBlockCreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, maxInlineUniformBlockBindings );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DescriptorPoolInlineUniformBlockCreateInfo const & ) const = default;
#else
bool operator==( DescriptorPoolInlineUniformBlockCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( maxInlineUniformBlockBindings == rhs.maxInlineUniformBlockBindings );
#endif
}
bool operator!=( DescriptorPoolInlineUniformBlockCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorPoolInlineUniformBlockCreateInfo;
const void * pNext = {};
uint32_t maxInlineUniformBlockBindings = {};
};
template <>
struct CppType<StructureType, StructureType::eDescriptorPoolInlineUniformBlockCreateInfo>
{
using Type = DescriptorPoolInlineUniformBlockCreateInfo;
};
using DescriptorPoolInlineUniformBlockCreateInfoEXT = DescriptorPoolInlineUniformBlockCreateInfo;
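// Usage sketch (illustrative; Vulkan 1.3 core, previously VK_EXT_inline_uniform_block):
// reserving inline uniform block bindings by chaining into DescriptorPoolCreateInfo::pNext.
//   vk::DescriptorPoolInlineUniformBlockCreateInfo inlineInfo( 4 /*maxInlineUniformBlockBindings*/ );
//   vk::DescriptorPoolCreateInfo poolInfo( {}, 8, poolSizes, &inlineInfo );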
// wrapper struct for struct VkDescriptorSetAllocateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorSetAllocateInfo.html
struct DescriptorSetAllocateInfo
{
using NativeType = VkDescriptorSetAllocateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetAllocateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DescriptorSetAllocateInfo(VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_ = {}, uint32_t descriptorSetCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, descriptorPool{ descriptorPool_ }, descriptorSetCount{ descriptorSetCount_ }, pSetLayouts{ pSetLayouts_ }
{}
VULKAN_HPP_CONSTEXPR DescriptorSetAllocateInfo( DescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DescriptorSetAllocateInfo( VkDescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: DescriptorSetAllocateInfo( *reinterpret_cast<DescriptorSetAllocateInfo const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
DescriptorSetAllocateInfo( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayout> const & setLayouts_, const void * pNext_ = nullptr )
: pNext( pNext_ ), descriptorPool( descriptorPool_ ), descriptorSetCount( static_cast<uint32_t>( setLayouts_.size() ) ), pSetLayouts( setLayouts_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
DescriptorSetAllocateInfo & operator=( DescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DescriptorSetAllocateInfo & operator=( VkDescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo & setDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_ ) VULKAN_HPP_NOEXCEPT
{
descriptorPool = descriptorPool_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo & setDescriptorSetCount( uint32_t descriptorSetCount_ ) VULKAN_HPP_NOEXCEPT
{
descriptorSetCount = descriptorSetCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo & setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ ) VULKAN_HPP_NOEXCEPT
{
pSetLayouts = pSetLayouts_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
DescriptorSetAllocateInfo & setSetLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayout> const & setLayouts_ ) VULKAN_HPP_NOEXCEPT
{
descriptorSetCount = static_cast<uint32_t>( setLayouts_.size() );
pSetLayouts = setLayouts_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDescriptorSetAllocateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDescriptorSetAllocateInfo*>( this );
}
operator VkDescriptorSetAllocateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDescriptorSetAllocateInfo*>( this );
}
operator VkDescriptorSetAllocateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDescriptorSetAllocateInfo*>( this );
}
operator VkDescriptorSetAllocateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDescriptorSetAllocateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DescriptorPool const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, descriptorPool, descriptorSetCount, pSetLayouts );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DescriptorSetAllocateInfo const & ) const = default;
#else
bool operator==( DescriptorSetAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( descriptorPool == rhs.descriptorPool )
&& ( descriptorSetCount == rhs.descriptorSetCount )
&& ( pSetLayouts == rhs.pSetLayouts );
#endif
}
bool operator!=( DescriptorSetAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetAllocateInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool = {};
uint32_t descriptorSetCount = {};
const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts = {};
};
template <>
struct CppType<StructureType, StructureType::eDescriptorSetAllocateInfo>
{
using Type = DescriptorSetAllocateInfo;
};
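// Usage sketch for DescriptorSetAllocateInfo (illustrative; `device`, `pool` and `layouts` are assumed;
// the exact return type depends on the exception configuration):
//   vk::DescriptorSetAllocateInfo allocInfo( pool, layouts );  // enhanced-mode constructor derives descriptorSetCount from layouts.size()
//   std::vector<vk::DescriptorSet> sets = device.allocateDescriptorSets( allocInfo );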
// wrapper struct for struct VkDescriptorSetBindingReferenceVALVE, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorSetBindingReferenceVALVE.html
struct DescriptorSetBindingReferenceVALVE
{
using NativeType = VkDescriptorSetBindingReferenceVALVE;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetBindingReferenceVALVE;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DescriptorSetBindingReferenceVALVE(VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ = {}, uint32_t binding_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, descriptorSetLayout{ descriptorSetLayout_ }, binding{ binding_ }
{}
VULKAN_HPP_CONSTEXPR DescriptorSetBindingReferenceVALVE( DescriptorSetBindingReferenceVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DescriptorSetBindingReferenceVALVE( VkDescriptorSetBindingReferenceVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
: DescriptorSetBindingReferenceVALVE( *reinterpret_cast<DescriptorSetBindingReferenceVALVE const *>( &rhs ) )
{}
DescriptorSetBindingReferenceVALVE & operator=( DescriptorSetBindingReferenceVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DescriptorSetBindingReferenceVALVE & operator=( VkDescriptorSetBindingReferenceVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DescriptorSetBindingReferenceVALVE & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorSetBindingReferenceVALVE & setDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ ) VULKAN_HPP_NOEXCEPT
{
descriptorSetLayout = descriptorSetLayout_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorSetBindingReferenceVALVE & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
{
binding = binding_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDescriptorSetBindingReferenceVALVE const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDescriptorSetBindingReferenceVALVE*>( this );
}
operator VkDescriptorSetBindingReferenceVALVE &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDescriptorSetBindingReferenceVALVE*>( this );
}
operator VkDescriptorSetBindingReferenceVALVE const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDescriptorSetBindingReferenceVALVE*>( this );
}
operator VkDescriptorSetBindingReferenceVALVE *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDescriptorSetBindingReferenceVALVE*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DescriptorSetLayout const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, descriptorSetLayout, binding );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DescriptorSetBindingReferenceVALVE const & ) const = default;
#else
bool operator==( DescriptorSetBindingReferenceVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( descriptorSetLayout == rhs.descriptorSetLayout )
&& ( binding == rhs.binding );
#endif
}
bool operator!=( DescriptorSetBindingReferenceVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetBindingReferenceVALVE;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout = {};
uint32_t binding = {};
};
template <>
struct CppType<StructureType, StructureType::eDescriptorSetBindingReferenceVALVE>
{
using Type = DescriptorSetBindingReferenceVALVE;
};
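// Usage sketch for DescriptorSetBindingReferenceVALVE (illustrative; requires the
// VK_VALVE_descriptor_set_host_mapping extension, and assumes an existing `device` and `setLayout`):
//   vk::DescriptorSetBindingReferenceVALVE bindingRef( setLayout, 0 /*binding*/ );
//   vk::DescriptorSetLayoutHostMappingInfoVALVE mappingInfo = device.getDescriptorSetLayoutHostMappingInfoVALVE( bindingRef );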
// wrapper struct for struct VkDescriptorSetLayoutBinding, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorSetLayoutBinding.html
struct DescriptorSetLayoutBinding
{
using NativeType = VkDescriptorSetLayoutBinding;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBinding(uint32_t binding_ = {}, VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, uint32_t descriptorCount_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, const VULKAN_HPP_NAMESPACE::Sampler * pImmutableSamplers_ = {}) VULKAN_HPP_NOEXCEPT
: binding{ binding_ }, descriptorType{ descriptorType_ }, descriptorCount{ descriptorCount_ }, stageFlags{ stageFlags_ }, pImmutableSamplers{ pImmutableSamplers_ }
{}
VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBinding( DescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DescriptorSetLayoutBinding( VkDescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT
: DescriptorSetLayoutBinding( *reinterpret_cast<DescriptorSetLayoutBinding const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
DescriptorSetLayoutBinding( uint32_t binding_, VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Sampler> const & immutableSamplers_ )
: binding( binding_ ), descriptorType( descriptorType_ ), descriptorCount( static_cast<uint32_t>( immutableSamplers_.size() ) ), stageFlags( stageFlags_ ), pImmutableSamplers( immutableSamplers_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
DescriptorSetLayoutBinding & operator=( DescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DescriptorSetLayoutBinding & operator=( VkDescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
{
binding = binding_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
{
descriptorType = descriptorType_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
{
descriptorCount = descriptorCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT
{
stageFlags = stageFlags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & setPImmutableSamplers( const VULKAN_HPP_NAMESPACE::Sampler * pImmutableSamplers_ ) VULKAN_HPP_NOEXCEPT
{
pImmutableSamplers = pImmutableSamplers_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
DescriptorSetLayoutBinding & setImmutableSamplers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Sampler> const & immutableSamplers_ ) VULKAN_HPP_NOEXCEPT
{
descriptorCount = static_cast<uint32_t>( immutableSamplers_.size() );
pImmutableSamplers = immutableSamplers_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDescriptorSetLayoutBinding const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDescriptorSetLayoutBinding*>( this );
}
operator VkDescriptorSetLayoutBinding &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDescriptorSetLayoutBinding*>( this );
}
operator VkDescriptorSetLayoutBinding const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDescriptorSetLayoutBinding*>( this );
}
operator VkDescriptorSetLayoutBinding *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDescriptorSetLayoutBinding*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<uint32_t const &, VULKAN_HPP_NAMESPACE::DescriptorType const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ShaderStageFlags const &, const VULKAN_HPP_NAMESPACE::Sampler * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( binding, descriptorType, descriptorCount, stageFlags, pImmutableSamplers );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DescriptorSetLayoutBinding const & ) const = default;
#else
bool operator==( DescriptorSetLayoutBinding const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( binding == rhs.binding )
&& ( descriptorType == rhs.descriptorType )
&& ( descriptorCount == rhs.descriptorCount )
&& ( stageFlags == rhs.stageFlags )
&& ( pImmutableSamplers == rhs.pImmutableSamplers );
#endif
}
bool operator!=( DescriptorSetLayoutBinding const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
uint32_t binding = {};
VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
uint32_t descriptorCount = {};
VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {};
const VULKAN_HPP_NAMESPACE::Sampler * pImmutableSamplers = {};
};
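// Usage sketch for DescriptorSetLayoutBinding (illustrative): a single uniform buffer at binding 0,
// visible to the vertex stage. pImmutableSamplers is only consumed for sampler-type descriptors,
// where it may point to descriptorCount samplers that become immutable for this binding.
//   vk::DescriptorSetLayoutBinding uboBinding( 0 /*binding*/, vk::DescriptorType::eUniformBuffer, 1 /*descriptorCount*/, vk::ShaderStageFlagBits::eVertex );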
// wrapper struct for struct VkDescriptorSetLayoutBindingFlagsCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorSetLayoutBindingFlagsCreateInfo.html
struct DescriptorSetLayoutBindingFlagsCreateInfo
{
using NativeType = VkDescriptorSetLayoutBindingFlagsCreateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBindingFlagsCreateInfo(uint32_t bindingCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags * pBindingFlags_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, bindingCount{ bindingCount_ }, pBindingFlags{ pBindingFlags_ }
{}
VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBindingFlagsCreateInfo( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DescriptorSetLayoutBindingFlagsCreateInfo( VkDescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: DescriptorSetLayoutBindingFlagsCreateInfo( *reinterpret_cast<DescriptorSetLayoutBindingFlagsCreateInfo const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
DescriptorSetLayoutBindingFlagsCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags> const & bindingFlags_, const void * pNext_ = nullptr )
: pNext( pNext_ ), bindingCount( static_cast<uint32_t>( bindingFlags_.size() ) ), pBindingFlags( bindingFlags_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
DescriptorSetLayoutBindingFlagsCreateInfo & operator=( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DescriptorSetLayoutBindingFlagsCreateInfo & operator=( VkDescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBindingFlagsCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBindingFlagsCreateInfo & setBindingCount( uint32_t bindingCount_ ) VULKAN_HPP_NOEXCEPT
{
bindingCount = bindingCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBindingFlagsCreateInfo & setPBindingFlags( const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags * pBindingFlags_ ) VULKAN_HPP_NOEXCEPT
{
pBindingFlags = pBindingFlags_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
DescriptorSetLayoutBindingFlagsCreateInfo & setBindingFlags( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags> const & bindingFlags_ ) VULKAN_HPP_NOEXCEPT
{
bindingCount = static_cast<uint32_t>( bindingFlags_.size() );
pBindingFlags = bindingFlags_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDescriptorSetLayoutBindingFlagsCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDescriptorSetLayoutBindingFlagsCreateInfo*>( this );
}
operator VkDescriptorSetLayoutBindingFlagsCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDescriptorSetLayoutBindingFlagsCreateInfo*>( this );
}
operator VkDescriptorSetLayoutBindingFlagsCreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDescriptorSetLayoutBindingFlagsCreateInfo*>( this );
}
operator VkDescriptorSetLayoutBindingFlagsCreateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDescriptorSetLayoutBindingFlagsCreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, bindingCount, pBindingFlags );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DescriptorSetLayoutBindingFlagsCreateInfo const & ) const = default;
#else
bool operator==( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( bindingCount == rhs.bindingCount )
&& ( pBindingFlags == rhs.pBindingFlags );
#endif
}
bool operator!=( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo;
const void * pNext = {};
uint32_t bindingCount = {};
const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags * pBindingFlags = {};
};
template <>
struct CppType<StructureType, StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo>
{
using Type = DescriptorSetLayoutBindingFlagsCreateInfo;
};
using DescriptorSetLayoutBindingFlagsCreateInfoEXT = DescriptorSetLayoutBindingFlagsCreateInfo;
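// Usage sketch for DescriptorSetLayoutBindingFlagsCreateInfo (illustrative; `bindings` with two entries
// is assumed): chained into DescriptorSetLayoutCreateInfo::pNext; the flag count must be zero or equal
// to the layout's bindingCount, with flags matched to bindings by index.
//   std::array<vk::DescriptorBindingFlags, 2> bindingFlags = { vk::DescriptorBindingFlags{}, vk::DescriptorBindingFlagBits::eVariableDescriptorCount };
//   vk::DescriptorSetLayoutBindingFlagsCreateInfo bindingFlagsInfo( bindingFlags );
//   vk::DescriptorSetLayoutCreateInfo layoutInfo( {}, bindings, &bindingFlagsInfo );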
// wrapper struct for struct VkDescriptorSetLayoutCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorSetLayoutCreateInfo.html
struct DescriptorSetLayoutCreateInfo
{
using NativeType = VkDescriptorSetLayoutCreateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutCreateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateInfo(VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_ = {}, uint32_t bindingCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding * pBindings_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, bindingCount{ bindingCount_ }, pBindings{ pBindings_ }
{}
VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateInfo( DescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DescriptorSetLayoutCreateInfo( VkDescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: DescriptorSetLayoutCreateInfo( *reinterpret_cast<DescriptorSetLayoutCreateInfo const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
DescriptorSetLayoutCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding> const & bindings_, const void * pNext_ = nullptr )
: pNext( pNext_ ), flags( flags_ ), bindingCount( static_cast<uint32_t>( bindings_.size() ) ), pBindings( bindings_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
DescriptorSetLayoutCreateInfo & operator=( DescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DescriptorSetLayoutCreateInfo & operator=( VkDescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo & setBindingCount( uint32_t bindingCount_ ) VULKAN_HPP_NOEXCEPT
{
bindingCount = bindingCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo & setPBindings( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding * pBindings_ ) VULKAN_HPP_NOEXCEPT
{
pBindings = pBindings_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
DescriptorSetLayoutCreateInfo & setBindings( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding> const & bindings_ ) VULKAN_HPP_NOEXCEPT
{
bindingCount = static_cast<uint32_t>( bindings_.size() );
pBindings = bindings_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDescriptorSetLayoutCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( this );
}
operator VkDescriptorSetLayoutCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDescriptorSetLayoutCreateInfo*>( this );
}
operator VkDescriptorSetLayoutCreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( this );
}
operator VkDescriptorSetLayoutCreateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDescriptorSetLayoutCreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, bindingCount, pBindings );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DescriptorSetLayoutCreateInfo const & ) const = default;
#else
bool operator==( DescriptorSetLayoutCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( bindingCount == rhs.bindingCount )
&& ( pBindings == rhs.pBindings );
#endif
}
bool operator!=( DescriptorSetLayoutCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutCreateInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags = {};
uint32_t bindingCount = {};
const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding * pBindings = {};
};
template <>
struct CppType<StructureType, StructureType::eDescriptorSetLayoutCreateInfo>
{
using Type = DescriptorSetLayoutCreateInfo;
};
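// Usage sketch for DescriptorSetLayoutCreateInfo (illustrative; `device` and `bindings` are assumed):
//   vk::DescriptorSetLayoutCreateInfo layoutInfo( {}, bindings );  // bindingCount/pBindings filled from the array proxy
//   vk::DescriptorSetLayout setLayout = device.createDescriptorSetLayout( layoutInfo );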
// wrapper struct for struct VkDescriptorSetLayoutHostMappingInfoVALVE, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorSetLayoutHostMappingInfoVALVE.html
struct DescriptorSetLayoutHostMappingInfoVALVE
{
using NativeType = VkDescriptorSetLayoutHostMappingInfoVALVE;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutHostMappingInfoVALVE;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DescriptorSetLayoutHostMappingInfoVALVE(size_t descriptorOffset_ = {}, uint32_t descriptorSize_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, descriptorOffset{ descriptorOffset_ }, descriptorSize{ descriptorSize_ }
{}
VULKAN_HPP_CONSTEXPR DescriptorSetLayoutHostMappingInfoVALVE( DescriptorSetLayoutHostMappingInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DescriptorSetLayoutHostMappingInfoVALVE( VkDescriptorSetLayoutHostMappingInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
: DescriptorSetLayoutHostMappingInfoVALVE( *reinterpret_cast<DescriptorSetLayoutHostMappingInfoVALVE const *>( &rhs ) )
{}
DescriptorSetLayoutHostMappingInfoVALVE & operator=( DescriptorSetLayoutHostMappingInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DescriptorSetLayoutHostMappingInfoVALVE & operator=( VkDescriptorSetLayoutHostMappingInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutHostMappingInfoVALVE & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutHostMappingInfoVALVE & setDescriptorOffset( size_t descriptorOffset_ ) VULKAN_HPP_NOEXCEPT
{
descriptorOffset = descriptorOffset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutHostMappingInfoVALVE & setDescriptorSize( uint32_t descriptorSize_ ) VULKAN_HPP_NOEXCEPT
{
descriptorSize = descriptorSize_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDescriptorSetLayoutHostMappingInfoVALVE const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDescriptorSetLayoutHostMappingInfoVALVE*>( this );
}
operator VkDescriptorSetLayoutHostMappingInfoVALVE &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDescriptorSetLayoutHostMappingInfoVALVE*>( this );
}
operator VkDescriptorSetLayoutHostMappingInfoVALVE const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDescriptorSetLayoutHostMappingInfoVALVE*>( this );
}
operator VkDescriptorSetLayoutHostMappingInfoVALVE *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDescriptorSetLayoutHostMappingInfoVALVE*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, size_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, descriptorOffset, descriptorSize );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DescriptorSetLayoutHostMappingInfoVALVE const & ) const = default;
#else
bool operator==( DescriptorSetLayoutHostMappingInfoVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( descriptorOffset == rhs.descriptorOffset )
&& ( descriptorSize == rhs.descriptorSize );
#endif
}
bool operator!=( DescriptorSetLayoutHostMappingInfoVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutHostMappingInfoVALVE;
void * pNext = {};
size_t descriptorOffset = {};
uint32_t descriptorSize = {};
};
template <>
struct CppType<StructureType, StructureType::eDescriptorSetLayoutHostMappingInfoVALVE>
{
using Type = DescriptorSetLayoutHostMappingInfoVALVE;
};
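// Note on DescriptorSetLayoutHostMappingInfoVALVE (VK_VALVE_descriptor_set_host_mapping):
// descriptorOffset is the byte offset of the referenced binding within the host mapping of a
// descriptor set (obtained via getDescriptorSetHostMappingVALVE), and descriptorSize is the size in
// bytes of a single descriptor at that binding.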
// wrapper struct for struct VkDescriptorSetLayoutSupport, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorSetLayoutSupport.html
struct DescriptorSetLayoutSupport
{
using NativeType = VkDescriptorSetLayoutSupport;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutSupport;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DescriptorSetLayoutSupport(VULKAN_HPP_NAMESPACE::Bool32 supported_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, supported{ supported_ }
{}
VULKAN_HPP_CONSTEXPR DescriptorSetLayoutSupport( DescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DescriptorSetLayoutSupport( VkDescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
: DescriptorSetLayoutSupport( *reinterpret_cast<DescriptorSetLayoutSupport const *>( &rhs ) )
{}
DescriptorSetLayoutSupport & operator=( DescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DescriptorSetLayoutSupport & operator=( VkDescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport const *>( &rhs );
return *this;
}
operator VkDescriptorSetLayoutSupport const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDescriptorSetLayoutSupport*>( this );
}
operator VkDescriptorSetLayoutSupport &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDescriptorSetLayoutSupport*>( this );
}
operator VkDescriptorSetLayoutSupport const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDescriptorSetLayoutSupport*>( this );
}
operator VkDescriptorSetLayoutSupport *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDescriptorSetLayoutSupport*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, supported );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DescriptorSetLayoutSupport const & ) const = default;
#else
bool operator==( DescriptorSetLayoutSupport const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( supported == rhs.supported );
#endif
}
bool operator!=( DescriptorSetLayoutSupport const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutSupport;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 supported = {};
};
template <>
struct CppType<StructureType, StructureType::eDescriptorSetLayoutSupport>
{
using Type = DescriptorSetLayoutSupport;
};
using DescriptorSetLayoutSupportKHR = DescriptorSetLayoutSupport;
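// Usage sketch for DescriptorSetLayoutSupport (illustrative; `device` and `layoutInfo` are assumed):
// query whether a descriptor set layout could be created, without actually creating it.
//   vk::DescriptorSetLayoutSupport support = device.getDescriptorSetLayoutSupport( layoutInfo );
//   bool creatable = ( support.supported == VK_TRUE );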
// wrapper struct for struct VkDescriptorSetVariableDescriptorCountAllocateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorSetVariableDescriptorCountAllocateInfo.html
struct DescriptorSetVariableDescriptorCountAllocateInfo
{
using NativeType = VkDescriptorSetVariableDescriptorCountAllocateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountAllocateInfo(uint32_t descriptorSetCount_ = {}, const uint32_t * pDescriptorCounts_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, descriptorSetCount{ descriptorSetCount_ }, pDescriptorCounts{ pDescriptorCounts_ }
{}
VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountAllocateInfo( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DescriptorSetVariableDescriptorCountAllocateInfo( VkDescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: DescriptorSetVariableDescriptorCountAllocateInfo( *reinterpret_cast<DescriptorSetVariableDescriptorCountAllocateInfo const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
DescriptorSetVariableDescriptorCountAllocateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & descriptorCounts_, const void * pNext_ = nullptr )
: pNext( pNext_ ), descriptorSetCount( static_cast<uint32_t>( descriptorCounts_.size() ) ), pDescriptorCounts( descriptorCounts_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
DescriptorSetVariableDescriptorCountAllocateInfo & operator=( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DescriptorSetVariableDescriptorCountAllocateInfo & operator=( VkDescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DescriptorSetVariableDescriptorCountAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorSetVariableDescriptorCountAllocateInfo & setDescriptorSetCount( uint32_t descriptorSetCount_ ) VULKAN_HPP_NOEXCEPT
{
descriptorSetCount = descriptorSetCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorSetVariableDescriptorCountAllocateInfo & setPDescriptorCounts( const uint32_t * pDescriptorCounts_ ) VULKAN_HPP_NOEXCEPT
{
pDescriptorCounts = pDescriptorCounts_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
DescriptorSetVariableDescriptorCountAllocateInfo & setDescriptorCounts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & descriptorCounts_ ) VULKAN_HPP_NOEXCEPT
{
descriptorSetCount = static_cast<uint32_t>( descriptorCounts_.size() );
pDescriptorCounts = descriptorCounts_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDescriptorSetVariableDescriptorCountAllocateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDescriptorSetVariableDescriptorCountAllocateInfo*>( this );
}
operator VkDescriptorSetVariableDescriptorCountAllocateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDescriptorSetVariableDescriptorCountAllocateInfo*>( this );
}
operator VkDescriptorSetVariableDescriptorCountAllocateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDescriptorSetVariableDescriptorCountAllocateInfo*>( this );
}
operator VkDescriptorSetVariableDescriptorCountAllocateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDescriptorSetVariableDescriptorCountAllocateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const uint32_t * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, descriptorSetCount, pDescriptorCounts );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DescriptorSetVariableDescriptorCountAllocateInfo const & ) const = default;
#else
bool operator==( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( descriptorSetCount == rhs.descriptorSetCount )
&& ( pDescriptorCounts == rhs.pDescriptorCounts );
#endif
}
bool operator!=( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo;
const void * pNext = {};
uint32_t descriptorSetCount = {};
const uint32_t * pDescriptorCounts = {};
};
template <>
struct CppType<StructureType, StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo>
{
using Type = DescriptorSetVariableDescriptorCountAllocateInfo;
};
using DescriptorSetVariableDescriptorCountAllocateInfoEXT = DescriptorSetVariableDescriptorCountAllocateInfo;
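// Usage sketch for DescriptorSetVariableDescriptorCountAllocateInfo (illustrative; `pool` and `layouts`
// are assumed): chained into DescriptorSetAllocateInfo::pNext; each entry gives the actual descriptor
// count for the variable-sized binding of the corresponding set, so descriptorSetCount must be zero or
// match the allocation's set count.
//   uint32_t variableCounts[] = { 128 };
//   vk::DescriptorSetVariableDescriptorCountAllocateInfo variableCountInfo( variableCounts );
//   vk::DescriptorSetAllocateInfo allocInfo( pool, layouts, &variableCountInfo );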
// wrapper struct for struct VkDescriptorSetVariableDescriptorCountLayoutSupport, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorSetVariableDescriptorCountLayoutSupport.html
struct DescriptorSetVariableDescriptorCountLayoutSupport
{
using NativeType = VkDescriptorSetVariableDescriptorCountLayoutSupport;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountLayoutSupport(uint32_t maxVariableDescriptorCount_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, maxVariableDescriptorCount{ maxVariableDescriptorCount_ }
{}
VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountLayoutSupport( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DescriptorSetVariableDescriptorCountLayoutSupport( VkDescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
: DescriptorSetVariableDescriptorCountLayoutSupport( *reinterpret_cast<DescriptorSetVariableDescriptorCountLayoutSupport const *>( &rhs ) )
{}
DescriptorSetVariableDescriptorCountLayoutSupport & operator=( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DescriptorSetVariableDescriptorCountLayoutSupport & operator=( VkDescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupport const *>( &rhs );
return *this;
}
operator VkDescriptorSetVariableDescriptorCountLayoutSupport const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDescriptorSetVariableDescriptorCountLayoutSupport*>( this );
}
operator VkDescriptorSetVariableDescriptorCountLayoutSupport &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDescriptorSetVariableDescriptorCountLayoutSupport*>( this );
}
operator VkDescriptorSetVariableDescriptorCountLayoutSupport const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDescriptorSetVariableDescriptorCountLayoutSupport*>( this );
}
operator VkDescriptorSetVariableDescriptorCountLayoutSupport *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDescriptorSetVariableDescriptorCountLayoutSupport*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, maxVariableDescriptorCount );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DescriptorSetVariableDescriptorCountLayoutSupport const & ) const = default;
#else
bool operator==( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( maxVariableDescriptorCount == rhs.maxVariableDescriptorCount );
#endif
}
bool operator!=( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport;
void * pNext = {};
uint32_t maxVariableDescriptorCount = {};
};
template <>
struct CppType<StructureType, StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport>
{
using Type = DescriptorSetVariableDescriptorCountLayoutSupport;
};
using DescriptorSetVariableDescriptorCountLayoutSupportEXT = DescriptorSetVariableDescriptorCountLayoutSupport;
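// Usage sketch for DescriptorSetVariableDescriptorCountLayoutSupport (illustrative; `device` and
// `layoutInfo` are assumed): queried through a StructureChain so this struct rides on
// DescriptorSetLayoutSupport::pNext.
//   auto supportChain = device.getDescriptorSetLayoutSupport<vk::DescriptorSetLayoutSupport, vk::DescriptorSetVariableDescriptorCountLayoutSupport>( layoutInfo );
//   uint32_t maxVariable = supportChain.get<vk::DescriptorSetVariableDescriptorCountLayoutSupport>().maxVariableDescriptorCount;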
// wrapper struct for struct VkDescriptorUpdateTemplateEntry, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorUpdateTemplateEntry.html
struct DescriptorUpdateTemplateEntry
{
using NativeType = VkDescriptorUpdateTemplateEntry;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateEntry(uint32_t dstBinding_ = {}, uint32_t dstArrayElement_ = {}, uint32_t descriptorCount_ = {}, VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, size_t offset_ = {}, size_t stride_ = {}) VULKAN_HPP_NOEXCEPT
: dstBinding{ dstBinding_ }, dstArrayElement{ dstArrayElement_ }, descriptorCount{ descriptorCount_ }, descriptorType{ descriptorType_ }, offset{ offset_ }, stride{ stride_ }
{}
VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateEntry( DescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DescriptorUpdateTemplateEntry( VkDescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT
: DescriptorUpdateTemplateEntry( *reinterpret_cast<DescriptorUpdateTemplateEntry const *>( &rhs ) )
{}
DescriptorUpdateTemplateEntry & operator=( DescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DescriptorUpdateTemplateEntry & operator=( VkDescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT
{
dstBinding = dstBinding_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT
{
dstArrayElement = dstArrayElement_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
{
descriptorCount = descriptorCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
{
descriptorType = descriptorType_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & setOffset( size_t offset_ ) VULKAN_HPP_NOEXCEPT
{
offset = offset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & setStride( size_t stride_ ) VULKAN_HPP_NOEXCEPT
{
stride = stride_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDescriptorUpdateTemplateEntry const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDescriptorUpdateTemplateEntry*>( this );
}
operator VkDescriptorUpdateTemplateEntry &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDescriptorUpdateTemplateEntry*>( this );
}
operator VkDescriptorUpdateTemplateEntry const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDescriptorUpdateTemplateEntry*>( this );
}
operator VkDescriptorUpdateTemplateEntry *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDescriptorUpdateTemplateEntry*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DescriptorType const &, size_t const &, size_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( dstBinding, dstArrayElement, descriptorCount, descriptorType, offset, stride );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DescriptorUpdateTemplateEntry const & ) const = default;
#else
bool operator==( DescriptorUpdateTemplateEntry const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( dstBinding == rhs.dstBinding )
&& ( dstArrayElement == rhs.dstArrayElement )
&& ( descriptorCount == rhs.descriptorCount )
&& ( descriptorType == rhs.descriptorType )
&& ( offset == rhs.offset )
&& ( stride == rhs.stride );
#endif
}
bool operator!=( DescriptorUpdateTemplateEntry const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
uint32_t dstBinding = {};
uint32_t dstArrayElement = {};
uint32_t descriptorCount = {};
VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
size_t offset = {};
size_t stride = {};
};
using DescriptorUpdateTemplateEntryKHR = DescriptorUpdateTemplateEntry;
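// Usage sketch for DescriptorUpdateTemplateEntry (illustrative; `HostData` is a hypothetical
// application-side struct): offset and stride address raw host memory that is later handed to
// updateDescriptorSetWithTemplate.
//   struct HostData { vk::DescriptorBufferInfo ubo; };
//   vk::DescriptorUpdateTemplateEntry entry( 0 /*dstBinding*/, 0 /*dstArrayElement*/, 1 /*descriptorCount*/, vk::DescriptorType::eUniformBuffer, offsetof( HostData, ubo ), sizeof( vk::DescriptorBufferInfo ) );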
// wrapper struct for struct VkDescriptorUpdateTemplateCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorUpdateTemplateCreateInfo.html
struct DescriptorUpdateTemplateCreateInfo
{
using NativeType = VkDescriptorUpdateTemplateCreateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorUpdateTemplateCreateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateCreateInfo(VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_ = {}, uint32_t descriptorUpdateEntryCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry * pDescriptorUpdateEntries_ = {}, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet, VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ = {}, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {}, uint32_t set_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, descriptorUpdateEntryCount{ descriptorUpdateEntryCount_ }, pDescriptorUpdateEntries{ pDescriptorUpdateEntries_ }, templateType{ templateType_ }, descriptorSetLayout{ descriptorSetLayout_ }, pipelineBindPoint{ pipelineBindPoint_ }, pipelineLayout{ pipelineLayout_ }, set{ set_ }
{}
VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateCreateInfo( DescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DescriptorUpdateTemplateCreateInfo( VkDescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: DescriptorUpdateTemplateCreateInfo( *reinterpret_cast<DescriptorUpdateTemplateCreateInfo const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
DescriptorUpdateTemplateCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry> const & descriptorUpdateEntries_, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet, VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ = {}, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {}, uint32_t set_ = {}, const void * pNext_ = nullptr )
: pNext( pNext_ ), flags( flags_ ), descriptorUpdateEntryCount( static_cast<uint32_t>( descriptorUpdateEntries_.size() ) ), pDescriptorUpdateEntries( descriptorUpdateEntries_.data() ), templateType( templateType_ ), descriptorSetLayout( descriptorSetLayout_ ), pipelineBindPoint( pipelineBindPoint_ ), pipelineLayout( pipelineLayout_ ), set( set_ )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
DescriptorUpdateTemplateCreateInfo & operator=( DescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DescriptorUpdateTemplateCreateInfo & operator=( VkDescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setDescriptorUpdateEntryCount( uint32_t descriptorUpdateEntryCount_ ) VULKAN_HPP_NOEXCEPT
{
descriptorUpdateEntryCount = descriptorUpdateEntryCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setPDescriptorUpdateEntries( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry * pDescriptorUpdateEntries_ ) VULKAN_HPP_NOEXCEPT
{
pDescriptorUpdateEntries = pDescriptorUpdateEntries_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
DescriptorUpdateTemplateCreateInfo & setDescriptorUpdateEntries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry> const & descriptorUpdateEntries_ ) VULKAN_HPP_NOEXCEPT
{
descriptorUpdateEntryCount = static_cast<uint32_t>( descriptorUpdateEntries_.size() );
pDescriptorUpdateEntries = descriptorUpdateEntries_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setTemplateType( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ ) VULKAN_HPP_NOEXCEPT
{
templateType = templateType_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ ) VULKAN_HPP_NOEXCEPT
{
descriptorSetLayout = descriptorSetLayout_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
{
pipelineBindPoint = pipelineBindPoint_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ ) VULKAN_HPP_NOEXCEPT
{
pipelineLayout = pipelineLayout_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setSet( uint32_t set_ ) VULKAN_HPP_NOEXCEPT
{
set = set_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDescriptorUpdateTemplateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo*>( this );
}
operator VkDescriptorUpdateTemplateCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDescriptorUpdateTemplateCreateInfo*>( this );
}
operator VkDescriptorUpdateTemplateCreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo*>( this );
}
operator VkDescriptorUpdateTemplateCreateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDescriptorUpdateTemplateCreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry * const &, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType const &, VULKAN_HPP_NAMESPACE::DescriptorSetLayout const &, VULKAN_HPP_NAMESPACE::PipelineBindPoint const &, VULKAN_HPP_NAMESPACE::PipelineLayout const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, descriptorUpdateEntryCount, pDescriptorUpdateEntries, templateType, descriptorSetLayout, pipelineBindPoint, pipelineLayout, set );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DescriptorUpdateTemplateCreateInfo const & ) const = default;
#else
bool operator==( DescriptorUpdateTemplateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( descriptorUpdateEntryCount == rhs.descriptorUpdateEntryCount )
&& ( pDescriptorUpdateEntries == rhs.pDescriptorUpdateEntries )
&& ( templateType == rhs.templateType )
&& ( descriptorSetLayout == rhs.descriptorSetLayout )
&& ( pipelineBindPoint == rhs.pipelineBindPoint )
&& ( pipelineLayout == rhs.pipelineLayout )
&& ( set == rhs.set );
#endif
}
bool operator!=( DescriptorUpdateTemplateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorUpdateTemplateCreateInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags = {};
uint32_t descriptorUpdateEntryCount = {};
const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry * pDescriptorUpdateEntries = {};
VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet;
VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout = {};
VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout = {};
uint32_t set = {};
};
template <>
struct CppType<StructureType, StructureType::eDescriptorUpdateTemplateCreateInfo>
{
using Type = DescriptorUpdateTemplateCreateInfo;
};
using DescriptorUpdateTemplateCreateInfoKHR = DescriptorUpdateTemplateCreateInfo;
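// Usage sketch for DescriptorUpdateTemplateCreateInfo (illustrative; `device`, `entry`, `setLayout`,
// `descriptorSet` and `hostData` are assumed, and the enhanced-mode overload of
// updateDescriptorSetWithTemplate is used): create a template once, then push host data through it.
//   vk::DescriptorUpdateTemplateCreateInfo templateInfo( {}, entry, vk::DescriptorUpdateTemplateType::eDescriptorSet, setLayout );
//   vk::DescriptorUpdateTemplate updateTemplate = device.createDescriptorUpdateTemplate( templateInfo );
//   device.updateDescriptorSetWithTemplate( descriptorSet, updateTemplate, hostData );  // template overload takes the data by reference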
// wrapper struct for struct VkDeviceAddressBindingCallbackDataEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceAddressBindingCallbackDataEXT.html
struct DeviceAddressBindingCallbackDataEXT
{
using NativeType = VkDeviceAddressBindingCallbackDataEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceAddressBindingCallbackDataEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DeviceAddressBindingCallbackDataEXT(VULKAN_HPP_NAMESPACE::DeviceAddressBindingFlagsEXT flags_ = {}, VULKAN_HPP_NAMESPACE::DeviceAddress baseAddress_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::DeviceAddressBindingTypeEXT bindingType_ = VULKAN_HPP_NAMESPACE::DeviceAddressBindingTypeEXT::eBind, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, baseAddress{ baseAddress_ }, size{ size_ }, bindingType{ bindingType_ }
{}
VULKAN_HPP_CONSTEXPR DeviceAddressBindingCallbackDataEXT( DeviceAddressBindingCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DeviceAddressBindingCallbackDataEXT( VkDeviceAddressBindingCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: DeviceAddressBindingCallbackDataEXT( *reinterpret_cast<DeviceAddressBindingCallbackDataEXT const *>( &rhs ) )
{}
DeviceAddressBindingCallbackDataEXT & operator=( DeviceAddressBindingCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DeviceAddressBindingCallbackDataEXT & operator=( VkDeviceAddressBindingCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceAddressBindingCallbackDataEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DeviceAddressBindingCallbackDataEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceAddressBindingCallbackDataEXT & setFlags( VULKAN_HPP_NAMESPACE::DeviceAddressBindingFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceAddressBindingCallbackDataEXT & setBaseAddress( VULKAN_HPP_NAMESPACE::DeviceAddress baseAddress_ ) VULKAN_HPP_NOEXCEPT
{
baseAddress = baseAddress_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceAddressBindingCallbackDataEXT & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
{
size = size_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceAddressBindingCallbackDataEXT & setBindingType( VULKAN_HPP_NAMESPACE::DeviceAddressBindingTypeEXT bindingType_ ) VULKAN_HPP_NOEXCEPT
{
bindingType = bindingType_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDeviceAddressBindingCallbackDataEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceAddressBindingCallbackDataEXT*>( this );
}
operator VkDeviceAddressBindingCallbackDataEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceAddressBindingCallbackDataEXT*>( this );
}
operator VkDeviceAddressBindingCallbackDataEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDeviceAddressBindingCallbackDataEXT*>( this );
}
operator VkDeviceAddressBindingCallbackDataEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDeviceAddressBindingCallbackDataEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DeviceAddressBindingFlagsEXT const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceAddressBindingTypeEXT const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, baseAddress, size, bindingType );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DeviceAddressBindingCallbackDataEXT const & ) const = default;
#else
bool operator==( DeviceAddressBindingCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( baseAddress == rhs.baseAddress )
&& ( size == rhs.size )
&& ( bindingType == rhs.bindingType );
#endif
}
bool operator!=( DeviceAddressBindingCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceAddressBindingCallbackDataEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::DeviceAddressBindingFlagsEXT flags = {};
VULKAN_HPP_NAMESPACE::DeviceAddress baseAddress = {};
VULKAN_HPP_NAMESPACE::DeviceSize size = {};
VULKAN_HPP_NAMESPACE::DeviceAddressBindingTypeEXT bindingType = VULKAN_HPP_NAMESPACE::DeviceAddressBindingTypeEXT::eBind;
};
template <>
struct CppType<StructureType, StructureType::eDeviceAddressBindingCallbackDataEXT>
{
using Type = DeviceAddressBindingCallbackDataEXT;
};
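// Reading sketch (illustrative only): with the VK_EXT_device_address_binding_report extension enabled, this
// read-only struct is delivered chained into the pNext of the debug-utils messenger callback data, so
// application code typically just inspects its fields:
//
//   void onAddressBinding( vk::DeviceAddressBindingCallbackDataEXT const & data )
//   {
//     if ( data.bindingType == vk::DeviceAddressBindingTypeEXT::eBind )
//     {
//       // data.baseAddress / data.size describe the GPU virtual-address range that was just bound
//     }
//   }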
// wrapper struct for struct VkDeviceBufferMemoryRequirements, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceBufferMemoryRequirements.html
struct DeviceBufferMemoryRequirements
{
using NativeType = VkDeviceBufferMemoryRequirements;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceBufferMemoryRequirements;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DeviceBufferMemoryRequirements(const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pCreateInfo{ pCreateInfo_ }
{}
VULKAN_HPP_CONSTEXPR DeviceBufferMemoryRequirements( DeviceBufferMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DeviceBufferMemoryRequirements( VkDeviceBufferMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
: DeviceBufferMemoryRequirements( *reinterpret_cast<DeviceBufferMemoryRequirements const *>( &rhs ) )
{}
DeviceBufferMemoryRequirements & operator=( DeviceBufferMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DeviceBufferMemoryRequirements & operator=( VkDeviceBufferMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DeviceBufferMemoryRequirements & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceBufferMemoryRequirements & setPCreateInfo( const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo_ ) VULKAN_HPP_NOEXCEPT
{
pCreateInfo = pCreateInfo_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDeviceBufferMemoryRequirements const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceBufferMemoryRequirements*>( this );
}
operator VkDeviceBufferMemoryRequirements &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceBufferMemoryRequirements*>( this );
}
operator VkDeviceBufferMemoryRequirements const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDeviceBufferMemoryRequirements*>( this );
}
operator VkDeviceBufferMemoryRequirements *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDeviceBufferMemoryRequirements*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const VULKAN_HPP_NAMESPACE::BufferCreateInfo * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pCreateInfo );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DeviceBufferMemoryRequirements const & ) const = default;
#else
bool operator==( DeviceBufferMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pCreateInfo == rhs.pCreateInfo );
#endif
}
bool operator!=( DeviceBufferMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceBufferMemoryRequirements;
const void * pNext = {};
const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo = {};
};
template <>
struct CppType<StructureType, StructureType::eDeviceBufferMemoryRequirements>
{
using Type = DeviceBufferMemoryRequirements;
};
using DeviceBufferMemoryRequirementsKHR = DeviceBufferMemoryRequirements;
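// Usage sketch (illustrative only; assumes a valid vk::Device `device` and the corresponding
// vk::Device::getBufferMemoryRequirements overload from vulkan.hpp): this struct lets you query memory
// requirements from a BufferCreateInfo without actually creating the buffer first:
//
//   vk::BufferCreateInfo bufferInfo( {}, /*size=*/65536, vk::BufferUsageFlagBits::eStorageBuffer );
//   vk::DeviceBufferMemoryRequirements query( &bufferInfo );
//   vk::MemoryRequirements2 requirements = device.getBufferMemoryRequirements( query );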
// wrapper struct for struct VkDeviceQueueCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceQueueCreateInfo.html
struct DeviceQueueCreateInfo
{
using NativeType = VkDeviceQueueCreateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceQueueCreateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DeviceQueueCreateInfo(VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ = {}, uint32_t queueFamilyIndex_ = {}, uint32_t queueCount_ = {}, const float * pQueuePriorities_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, queueFamilyIndex{ queueFamilyIndex_ }, queueCount{ queueCount_ }, pQueuePriorities{ pQueuePriorities_ }
{}
VULKAN_HPP_CONSTEXPR DeviceQueueCreateInfo( DeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DeviceQueueCreateInfo( VkDeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: DeviceQueueCreateInfo( *reinterpret_cast<DeviceQueueCreateInfo const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
DeviceQueueCreateInfo( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_, uint32_t queueFamilyIndex_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const float> const & queuePriorities_, const void * pNext_ = nullptr )
: pNext( pNext_ ), flags( flags_ ), queueFamilyIndex( queueFamilyIndex_ ), queueCount( static_cast<uint32_t>( queuePriorities_.size() ) ), pQueuePriorities( queuePriorities_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
DeviceQueueCreateInfo & operator=( DeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DeviceQueueCreateInfo & operator=( VkDeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
{
queueFamilyIndex = queueFamilyIndex_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo & setQueueCount( uint32_t queueCount_ ) VULKAN_HPP_NOEXCEPT
{
queueCount = queueCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo & setPQueuePriorities( const float * pQueuePriorities_ ) VULKAN_HPP_NOEXCEPT
{
pQueuePriorities = pQueuePriorities_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
DeviceQueueCreateInfo & setQueuePriorities( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const float> const & queuePriorities_ ) VULKAN_HPP_NOEXCEPT
{
queueCount = static_cast<uint32_t>( queuePriorities_.size() );
pQueuePriorities = queuePriorities_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDeviceQueueCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceQueueCreateInfo*>( this );
}
operator VkDeviceQueueCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceQueueCreateInfo*>( this );
}
operator VkDeviceQueueCreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDeviceQueueCreateInfo*>( this );
}
operator VkDeviceQueueCreateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDeviceQueueCreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags const &, uint32_t const &, uint32_t const &, const float * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, queueFamilyIndex, queueCount, pQueuePriorities );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DeviceQueueCreateInfo const & ) const = default;
#else
bool operator==( DeviceQueueCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( queueFamilyIndex == rhs.queueFamilyIndex )
&& ( queueCount == rhs.queueCount )
&& ( pQueuePriorities == rhs.pQueuePriorities );
#endif
}
bool operator!=( DeviceQueueCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueCreateInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags = {};
uint32_t queueFamilyIndex = {};
uint32_t queueCount = {};
const float * pQueuePriorities = {};
};
template <>
struct CppType<StructureType, StructureType::eDeviceQueueCreateInfo>
{
using Type = DeviceQueueCreateInfo;
};
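// Usage sketch (illustrative only; std::array assumes <array>): either fill queueCount / pQueuePriorities
// by hand, or, when enhanced mode is available, let the ArrayProxy constructor above derive both together:
//
//   float priority = 1.0f;
//   vk::DeviceQueueCreateInfo oneQueue( {}, /*queueFamilyIndex=*/0, /*queueCount=*/1, &priority );
//
//   std::array<float, 2> priorities{ 1.0f, 0.5f };
//   vk::DeviceQueueCreateInfo twoQueues( {}, /*queueFamilyIndex=*/0, priorities );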
// wrapper struct for struct VkPhysicalDeviceFeatures, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFeatures.html
struct PhysicalDeviceFeatures
{
using NativeType = VkPhysicalDeviceFeatures;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures(VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray_ = {}, VULKAN_HPP_NAMESPACE::Bool32 independentBlend_ = {}, VULKAN_HPP_NAMESPACE::Bool32 geometryShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 tessellationShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading_ = {}, VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend_ = {}, VULKAN_HPP_NAMESPACE::Bool32 logicOp_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect_ = {}, VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthClamp_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthBounds_ = {}, VULKAN_HPP_NAMESPACE::Bool32 wideLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 largePoints_ = {}, VULKAN_HPP_NAMESPACE::Bool32 alphaToOne_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiViewport_ = {}, VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy_ = {}, VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2_ = {}, VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR_ = {}, VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC_ = {}, VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise_ = {}, VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInt64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInt16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseBinding_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased_ = {}, VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate_ = {}, VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries_ = {}) VULKAN_HPP_NOEXCEPT
: robustBufferAccess{ robustBufferAccess_ }, fullDrawIndexUint32{ fullDrawIndexUint32_ }, imageCubeArray{ imageCubeArray_ }, independentBlend{ independentBlend_ }, geometryShader{ geometryShader_ }, tessellationShader{ tessellationShader_ }, sampleRateShading{ sampleRateShading_ }, dualSrcBlend{ dualSrcBlend_ }, logicOp{ logicOp_ }, multiDrawIndirect{ multiDrawIndirect_ }, drawIndirectFirstInstance{ drawIndirectFirstInstance_ }, depthClamp{ depthClamp_ }, depthBiasClamp{ depthBiasClamp_ }, fillModeNonSolid{ fillModeNonSolid_ }, depthBounds{ depthBounds_ }, wideLines{ wideLines_ }, largePoints{ largePoints_ }, alphaToOne{ alphaToOne_ }, multiViewport{ multiViewport_ }, samplerAnisotropy{ samplerAnisotropy_ }, textureCompressionETC2{ textureCompressionETC2_ }, textureCompressionASTC_LDR{ textureCompressionASTC_LDR_ }, textureCompressionBC{ textureCompressionBC_ }, occlusionQueryPrecise{ occlusionQueryPrecise_ }, pipelineStatisticsQuery{ pipelineStatisticsQuery_ }, vertexPipelineStoresAndAtomics{ vertexPipelineStoresAndAtomics_ }, fragmentStoresAndAtomics{ fragmentStoresAndAtomics_ }, shaderTessellationAndGeometryPointSize{ shaderTessellationAndGeometryPointSize_ }, shaderImageGatherExtended{ shaderImageGatherExtended_ }, shaderStorageImageExtendedFormats{ shaderStorageImageExtendedFormats_ }, shaderStorageImageMultisample{ shaderStorageImageMultisample_ }, shaderStorageImageReadWithoutFormat{ shaderStorageImageReadWithoutFormat_ }, shaderStorageImageWriteWithoutFormat{ shaderStorageImageWriteWithoutFormat_ }, shaderUniformBufferArrayDynamicIndexing{ shaderUniformBufferArrayDynamicIndexing_ }, shaderSampledImageArrayDynamicIndexing{ shaderSampledImageArrayDynamicIndexing_ }, shaderStorageBufferArrayDynamicIndexing{ shaderStorageBufferArrayDynamicIndexing_ }, shaderStorageImageArrayDynamicIndexing{ shaderStorageImageArrayDynamicIndexing_ }, shaderClipDistance{ shaderClipDistance_ }, shaderCullDistance{ shaderCullDistance_ }, shaderFloat64{ shaderFloat64_ }, shaderInt64{ shaderInt64_ }, shaderInt16{ shaderInt16_ }, shaderResourceResidency{ shaderResourceResidency_ }, shaderResourceMinLod{ shaderResourceMinLod_ }, sparseBinding{ sparseBinding_ }, sparseResidencyBuffer{ sparseResidencyBuffer_ }, sparseResidencyImage2D{ sparseResidencyImage2D_ }, sparseResidencyImage3D{ sparseResidencyImage3D_ }, sparseResidency2Samples{ sparseResidency2Samples_ }, sparseResidency4Samples{ sparseResidency4Samples_ }, sparseResidency8Samples{ sparseResidency8Samples_ }, sparseResidency16Samples{ sparseResidency16Samples_ }, sparseResidencyAliased{ sparseResidencyAliased_ }, variableMultisampleRate{ variableMultisampleRate_ }, inheritedQueries{ inheritedQueries_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures( PhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceFeatures( VkPhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceFeatures( *reinterpret_cast<PhysicalDeviceFeatures const *>( &rhs ) )
{}
PhysicalDeviceFeatures & operator=( PhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceFeatures & operator=( VkPhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setRobustBufferAccess( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess_ ) VULKAN_HPP_NOEXCEPT
{
robustBufferAccess = robustBufferAccess_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setFullDrawIndexUint32( VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32_ ) VULKAN_HPP_NOEXCEPT
{
fullDrawIndexUint32 = fullDrawIndexUint32_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setImageCubeArray( VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray_ ) VULKAN_HPP_NOEXCEPT
{
imageCubeArray = imageCubeArray_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setIndependentBlend( VULKAN_HPP_NAMESPACE::Bool32 independentBlend_ ) VULKAN_HPP_NOEXCEPT
{
independentBlend = independentBlend_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 geometryShader_ ) VULKAN_HPP_NOEXCEPT
{
geometryShader = geometryShader_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 tessellationShader_ ) VULKAN_HPP_NOEXCEPT
{
tessellationShader = tessellationShader_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSampleRateShading( VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading_ ) VULKAN_HPP_NOEXCEPT
{
sampleRateShading = sampleRateShading_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setDualSrcBlend( VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend_ ) VULKAN_HPP_NOEXCEPT
{
dualSrcBlend = dualSrcBlend_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setLogicOp( VULKAN_HPP_NAMESPACE::Bool32 logicOp_ ) VULKAN_HPP_NOEXCEPT
{
logicOp = logicOp_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setMultiDrawIndirect( VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect_ ) VULKAN_HPP_NOEXCEPT
{
multiDrawIndirect = multiDrawIndirect_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setDrawIndirectFirstInstance( VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance_ ) VULKAN_HPP_NOEXCEPT
{
drawIndirectFirstInstance = drawIndirectFirstInstance_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setDepthClamp( VULKAN_HPP_NAMESPACE::Bool32 depthClamp_ ) VULKAN_HPP_NOEXCEPT
{
depthClamp = depthClamp_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setDepthBiasClamp( VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp_ ) VULKAN_HPP_NOEXCEPT
{
depthBiasClamp = depthBiasClamp_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setFillModeNonSolid( VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid_ ) VULKAN_HPP_NOEXCEPT
{
fillModeNonSolid = fillModeNonSolid_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setDepthBounds( VULKAN_HPP_NAMESPACE::Bool32 depthBounds_ ) VULKAN_HPP_NOEXCEPT
{
depthBounds = depthBounds_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setWideLines( VULKAN_HPP_NAMESPACE::Bool32 wideLines_ ) VULKAN_HPP_NOEXCEPT
{
wideLines = wideLines_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setLargePoints( VULKAN_HPP_NAMESPACE::Bool32 largePoints_ ) VULKAN_HPP_NOEXCEPT
{
largePoints = largePoints_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setAlphaToOne( VULKAN_HPP_NAMESPACE::Bool32 alphaToOne_ ) VULKAN_HPP_NOEXCEPT
{
alphaToOne = alphaToOne_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setMultiViewport( VULKAN_HPP_NAMESPACE::Bool32 multiViewport_ ) VULKAN_HPP_NOEXCEPT
{
multiViewport = multiViewport_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSamplerAnisotropy( VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy_ ) VULKAN_HPP_NOEXCEPT
{
samplerAnisotropy = samplerAnisotropy_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setTextureCompressionETC2( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2_ ) VULKAN_HPP_NOEXCEPT
{
textureCompressionETC2 = textureCompressionETC2_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setTextureCompressionASTC_LDR( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR_ ) VULKAN_HPP_NOEXCEPT
{
textureCompressionASTC_LDR = textureCompressionASTC_LDR_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setTextureCompressionBC( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC_ ) VULKAN_HPP_NOEXCEPT
{
textureCompressionBC = textureCompressionBC_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setOcclusionQueryPrecise( VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise_ ) VULKAN_HPP_NOEXCEPT
{
occlusionQueryPrecise = occlusionQueryPrecise_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setPipelineStatisticsQuery( VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery_ ) VULKAN_HPP_NOEXCEPT
{
pipelineStatisticsQuery = pipelineStatisticsQuery_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setVertexPipelineStoresAndAtomics( VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics_ ) VULKAN_HPP_NOEXCEPT
{
vertexPipelineStoresAndAtomics = vertexPipelineStoresAndAtomics_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setFragmentStoresAndAtomics( VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics_ ) VULKAN_HPP_NOEXCEPT
{
fragmentStoresAndAtomics = fragmentStoresAndAtomics_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderTessellationAndGeometryPointSize( VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize_ ) VULKAN_HPP_NOEXCEPT
{
shaderTessellationAndGeometryPointSize = shaderTessellationAndGeometryPointSize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderImageGatherExtended( VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended_ ) VULKAN_HPP_NOEXCEPT
{
shaderImageGatherExtended = shaderImageGatherExtended_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderStorageImageExtendedFormats( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats_ ) VULKAN_HPP_NOEXCEPT
{
shaderStorageImageExtendedFormats = shaderStorageImageExtendedFormats_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderStorageImageMultisample( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample_ ) VULKAN_HPP_NOEXCEPT
{
shaderStorageImageMultisample = shaderStorageImageMultisample_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderStorageImageReadWithoutFormat( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat_ ) VULKAN_HPP_NOEXCEPT
{
shaderStorageImageReadWithoutFormat = shaderStorageImageReadWithoutFormat_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderStorageImageWriteWithoutFormat( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat_ ) VULKAN_HPP_NOEXCEPT
{
shaderStorageImageWriteWithoutFormat = shaderStorageImageWriteWithoutFormat_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderUniformBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderUniformBufferArrayDynamicIndexing = shaderUniformBufferArrayDynamicIndexing_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderSampledImageArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderSampledImageArrayDynamicIndexing = shaderSampledImageArrayDynamicIndexing_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderStorageBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderStorageBufferArrayDynamicIndexing = shaderStorageBufferArrayDynamicIndexing_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderStorageImageArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderStorageImageArrayDynamicIndexing = shaderStorageImageArrayDynamicIndexing_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderClipDistance( VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance_ ) VULKAN_HPP_NOEXCEPT
{
shaderClipDistance = shaderClipDistance_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderCullDistance( VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance_ ) VULKAN_HPP_NOEXCEPT
{
shaderCullDistance = shaderCullDistance_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderFloat64( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64_ ) VULKAN_HPP_NOEXCEPT
{
shaderFloat64 = shaderFloat64_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderInt64( VULKAN_HPP_NAMESPACE::Bool32 shaderInt64_ ) VULKAN_HPP_NOEXCEPT
{
shaderInt64 = shaderInt64_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderInt16( VULKAN_HPP_NAMESPACE::Bool32 shaderInt16_ ) VULKAN_HPP_NOEXCEPT
{
shaderInt16 = shaderInt16_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderResourceResidency( VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency_ ) VULKAN_HPP_NOEXCEPT
{
shaderResourceResidency = shaderResourceResidency_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderResourceMinLod( VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod_ ) VULKAN_HPP_NOEXCEPT
{
shaderResourceMinLod = shaderResourceMinLod_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseBinding( VULKAN_HPP_NAMESPACE::Bool32 sparseBinding_ ) VULKAN_HPP_NOEXCEPT
{
sparseBinding = sparseBinding_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidencyBuffer( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer_ ) VULKAN_HPP_NOEXCEPT
{
sparseResidencyBuffer = sparseResidencyBuffer_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidencyImage2D( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D_ ) VULKAN_HPP_NOEXCEPT
{
sparseResidencyImage2D = sparseResidencyImage2D_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidencyImage3D( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D_ ) VULKAN_HPP_NOEXCEPT
{
sparseResidencyImage3D = sparseResidencyImage3D_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidency2Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples_ ) VULKAN_HPP_NOEXCEPT
{
sparseResidency2Samples = sparseResidency2Samples_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidency4Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples_ ) VULKAN_HPP_NOEXCEPT
{
sparseResidency4Samples = sparseResidency4Samples_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidency8Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples_ ) VULKAN_HPP_NOEXCEPT
{
sparseResidency8Samples = sparseResidency8Samples_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidency16Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples_ ) VULKAN_HPP_NOEXCEPT
{
sparseResidency16Samples = sparseResidency16Samples_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidencyAliased( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased_ ) VULKAN_HPP_NOEXCEPT
{
sparseResidencyAliased = sparseResidencyAliased_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setVariableMultisampleRate( VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate_ ) VULKAN_HPP_NOEXCEPT
{
variableMultisampleRate = variableMultisampleRate_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setInheritedQueries( VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries_ ) VULKAN_HPP_NOEXCEPT
{
inheritedQueries = inheritedQueries_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceFeatures*>( this );
}
operator VkPhysicalDeviceFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceFeatures*>( this );
}
operator VkPhysicalDeviceFeatures const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceFeatures*>( this );
}
operator VkPhysicalDeviceFeatures *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceFeatures*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( robustBufferAccess, fullDrawIndexUint32, imageCubeArray, independentBlend, geometryShader, tessellationShader, sampleRateShading, dualSrcBlend, logicOp, multiDrawIndirect, drawIndirectFirstInstance, depthClamp, depthBiasClamp, fillModeNonSolid, depthBounds, wideLines, largePoints, alphaToOne, multiViewport, samplerAnisotropy, textureCompressionETC2, textureCompressionASTC_LDR, textureCompressionBC, occlusionQueryPrecise, pipelineStatisticsQuery, vertexPipelineStoresAndAtomics, fragmentStoresAndAtomics, shaderTessellationAndGeometryPointSize, shaderImageGatherExtended, shaderStorageImageExtendedFormats, shaderStorageImageMultisample, shaderStorageImageReadWithoutFormat, shaderStorageImageWriteWithoutFormat, shaderUniformBufferArrayDynamicIndexing, shaderSampledImageArrayDynamicIndexing, shaderStorageBufferArrayDynamicIndexing, shaderStorageImageArrayDynamicIndexing, shaderClipDistance, shaderCullDistance, shaderFloat64, shaderInt64, shaderInt16, shaderResourceResidency, shaderResourceMinLod, sparseBinding, sparseResidencyBuffer, sparseResidencyImage2D, sparseResidencyImage3D, sparseResidency2Samples, sparseResidency4Samples, sparseResidency8Samples, sparseResidency16Samples, sparseResidencyAliased, variableMultisampleRate, inheritedQueries );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceFeatures const & ) const = default;
#else
bool operator==( PhysicalDeviceFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( robustBufferAccess == rhs.robustBufferAccess )
&& ( fullDrawIndexUint32 == rhs.fullDrawIndexUint32 )
&& ( imageCubeArray == rhs.imageCubeArray )
&& ( independentBlend == rhs.independentBlend )
&& ( geometryShader == rhs.geometryShader )
&& ( tessellationShader == rhs.tessellationShader )
&& ( sampleRateShading == rhs.sampleRateShading )
&& ( dualSrcBlend == rhs.dualSrcBlend )
&& ( logicOp == rhs.logicOp )
&& ( multiDrawIndirect == rhs.multiDrawIndirect )
&& ( drawIndirectFirstInstance == rhs.drawIndirectFirstInstance )
&& ( depthClamp == rhs.depthClamp )
&& ( depthBiasClamp == rhs.depthBiasClamp )
&& ( fillModeNonSolid == rhs.fillModeNonSolid )
&& ( depthBounds == rhs.depthBounds )
&& ( wideLines == rhs.wideLines )
&& ( largePoints == rhs.largePoints )
&& ( alphaToOne == rhs.alphaToOne )
&& ( multiViewport == rhs.multiViewport )
&& ( samplerAnisotropy == rhs.samplerAnisotropy )
&& ( textureCompressionETC2 == rhs.textureCompressionETC2 )
&& ( textureCompressionASTC_LDR == rhs.textureCompressionASTC_LDR )
&& ( textureCompressionBC == rhs.textureCompressionBC )
&& ( occlusionQueryPrecise == rhs.occlusionQueryPrecise )
&& ( pipelineStatisticsQuery == rhs.pipelineStatisticsQuery )
&& ( vertexPipelineStoresAndAtomics == rhs.vertexPipelineStoresAndAtomics )
&& ( fragmentStoresAndAtomics == rhs.fragmentStoresAndAtomics )
&& ( shaderTessellationAndGeometryPointSize == rhs.shaderTessellationAndGeometryPointSize )
&& ( shaderImageGatherExtended == rhs.shaderImageGatherExtended )
&& ( shaderStorageImageExtendedFormats == rhs.shaderStorageImageExtendedFormats )
&& ( shaderStorageImageMultisample == rhs.shaderStorageImageMultisample )
&& ( shaderStorageImageReadWithoutFormat == rhs.shaderStorageImageReadWithoutFormat )
&& ( shaderStorageImageWriteWithoutFormat == rhs.shaderStorageImageWriteWithoutFormat )
&& ( shaderUniformBufferArrayDynamicIndexing == rhs.shaderUniformBufferArrayDynamicIndexing )
&& ( shaderSampledImageArrayDynamicIndexing == rhs.shaderSampledImageArrayDynamicIndexing )
&& ( shaderStorageBufferArrayDynamicIndexing == rhs.shaderStorageBufferArrayDynamicIndexing )
&& ( shaderStorageImageArrayDynamicIndexing == rhs.shaderStorageImageArrayDynamicIndexing )
&& ( shaderClipDistance == rhs.shaderClipDistance )
&& ( shaderCullDistance == rhs.shaderCullDistance )
&& ( shaderFloat64 == rhs.shaderFloat64 )
&& ( shaderInt64 == rhs.shaderInt64 )
&& ( shaderInt16 == rhs.shaderInt16 )
&& ( shaderResourceResidency == rhs.shaderResourceResidency )
&& ( shaderResourceMinLod == rhs.shaderResourceMinLod )
&& ( sparseBinding == rhs.sparseBinding )
&& ( sparseResidencyBuffer == rhs.sparseResidencyBuffer )
&& ( sparseResidencyImage2D == rhs.sparseResidencyImage2D )
&& ( sparseResidencyImage3D == rhs.sparseResidencyImage3D )
&& ( sparseResidency2Samples == rhs.sparseResidency2Samples )
&& ( sparseResidency4Samples == rhs.sparseResidency4Samples )
&& ( sparseResidency8Samples == rhs.sparseResidency8Samples )
&& ( sparseResidency16Samples == rhs.sparseResidency16Samples )
&& ( sparseResidencyAliased == rhs.sparseResidencyAliased )
&& ( variableMultisampleRate == rhs.variableMultisampleRate )
&& ( inheritedQueries == rhs.inheritedQueries );
#endif
}
bool operator!=( PhysicalDeviceFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess = {};
VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32 = {};
VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray = {};
VULKAN_HPP_NAMESPACE::Bool32 independentBlend = {};
VULKAN_HPP_NAMESPACE::Bool32 geometryShader = {};
VULKAN_HPP_NAMESPACE::Bool32 tessellationShader = {};
VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading = {};
VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend = {};
VULKAN_HPP_NAMESPACE::Bool32 logicOp = {};
VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect = {};
VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance = {};
VULKAN_HPP_NAMESPACE::Bool32 depthClamp = {};
VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp = {};
VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid = {};
VULKAN_HPP_NAMESPACE::Bool32 depthBounds = {};
VULKAN_HPP_NAMESPACE::Bool32 wideLines = {};
VULKAN_HPP_NAMESPACE::Bool32 largePoints = {};
VULKAN_HPP_NAMESPACE::Bool32 alphaToOne = {};
VULKAN_HPP_NAMESPACE::Bool32 multiViewport = {};
VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy = {};
VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2 = {};
VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR = {};
VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC = {};
VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise = {};
VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery = {};
VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics = {};
VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64 = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderInt64 = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderInt16 = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod = {};
VULKAN_HPP_NAMESPACE::Bool32 sparseBinding = {};
VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer = {};
VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D = {};
VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D = {};
VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples = {};
VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples = {};
VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples = {};
VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples = {};
VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased = {};
VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate = {};
VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries = {};
};
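// Usage sketch (illustrative only; assumes a valid vk::PhysicalDevice `physicalDevice`): features are
// typically enabled only after checking support, and the result is then handed to
// DeviceCreateInfo::pEnabledFeatures:
//
//   vk::PhysicalDeviceFeatures supported = physicalDevice.getFeatures();
//   vk::PhysicalDeviceFeatures enabled{};
//   if ( supported.samplerAnisotropy )
//   {
//     enabled.setSamplerAnisotropy( VK_TRUE );
//   }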
// wrapper struct for struct VkDeviceCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceCreateInfo.html
struct DeviceCreateInfo
{
using NativeType = VkDeviceCreateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceCreateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DeviceCreateInfo(VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_ = {}, uint32_t queueCreateInfoCount_ = {}, const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo * pQueueCreateInfos_ = {}, uint32_t enabledLayerCount_ = {}, const char * const * ppEnabledLayerNames_ = {}, uint32_t enabledExtensionCount_ = {}, const char * const * ppEnabledExtensionNames_ = {}, const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pEnabledFeatures_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, queueCreateInfoCount{ queueCreateInfoCount_ }, pQueueCreateInfos{ pQueueCreateInfos_ }, enabledLayerCount{ enabledLayerCount_ }, ppEnabledLayerNames{ ppEnabledLayerNames_ }, enabledExtensionCount{ enabledExtensionCount_ }, ppEnabledExtensionNames{ ppEnabledExtensionNames_ }, pEnabledFeatures{ pEnabledFeatures_ }
{}
VULKAN_HPP_CONSTEXPR DeviceCreateInfo( DeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DeviceCreateInfo( VkDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: DeviceCreateInfo( *reinterpret_cast<DeviceCreateInfo const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
DeviceCreateInfo( VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo> const & queueCreateInfos_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledLayerNames_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledExtensionNames_ = {}, const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pEnabledFeatures_ = {}, const void * pNext_ = nullptr )
: pNext( pNext_ ), flags( flags_ ), queueCreateInfoCount( static_cast<uint32_t>( queueCreateInfos_.size() ) ), pQueueCreateInfos( queueCreateInfos_.data() ), enabledLayerCount( static_cast<uint32_t>( pEnabledLayerNames_.size() ) ), ppEnabledLayerNames( pEnabledLayerNames_.data() ), enabledExtensionCount( static_cast<uint32_t>( pEnabledExtensionNames_.size() ) ), ppEnabledExtensionNames( pEnabledExtensionNames_.data() ), pEnabledFeatures( pEnabledFeatures_ )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
DeviceCreateInfo & operator=( DeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DeviceCreateInfo & operator=( VkDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setQueueCreateInfoCount( uint32_t queueCreateInfoCount_ ) VULKAN_HPP_NOEXCEPT
{
queueCreateInfoCount = queueCreateInfoCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setPQueueCreateInfos( const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo * pQueueCreateInfos_ ) VULKAN_HPP_NOEXCEPT
{
pQueueCreateInfos = pQueueCreateInfos_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
DeviceCreateInfo & setQueueCreateInfos( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo> const & queueCreateInfos_ ) VULKAN_HPP_NOEXCEPT
{
queueCreateInfoCount = static_cast<uint32_t>( queueCreateInfos_.size() );
pQueueCreateInfos = queueCreateInfos_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setEnabledLayerCount( uint32_t enabledLayerCount_ ) VULKAN_HPP_NOEXCEPT
{
enabledLayerCount = enabledLayerCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setPpEnabledLayerNames( const char * const * ppEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT
{
ppEnabledLayerNames = ppEnabledLayerNames_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
DeviceCreateInfo & setPEnabledLayerNames( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT
{
enabledLayerCount = static_cast<uint32_t>( pEnabledLayerNames_.size() );
ppEnabledLayerNames = pEnabledLayerNames_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setEnabledExtensionCount( uint32_t enabledExtensionCount_ ) VULKAN_HPP_NOEXCEPT
{
enabledExtensionCount = enabledExtensionCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setPpEnabledExtensionNames( const char * const * ppEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT
{
ppEnabledExtensionNames = ppEnabledExtensionNames_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
DeviceCreateInfo & setPEnabledExtensionNames( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT
{
enabledExtensionCount = static_cast<uint32_t>( pEnabledExtensionNames_.size() );
ppEnabledExtensionNames = pEnabledExtensionNames_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setPEnabledFeatures( const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pEnabledFeatures_ ) VULKAN_HPP_NOEXCEPT
{
pEnabledFeatures = pEnabledFeatures_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDeviceCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceCreateInfo*>( this );
}
operator VkDeviceCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceCreateInfo*>( this );
}
operator VkDeviceCreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDeviceCreateInfo*>( this );
}
operator VkDeviceCreateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDeviceCreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceCreateFlags const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo * const &, uint32_t const &, const char * const * const &, uint32_t const &, const char * const * const &, const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, queueCreateInfoCount, pQueueCreateInfos, enabledLayerCount, ppEnabledLayerNames, enabledExtensionCount, ppEnabledExtensionNames, pEnabledFeatures );
}
#endif
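// Note: unlike most structs in this header, DeviceCreateInfo does not default its comparison operators:
// ppEnabledLayerNames and ppEnabledExtensionNames are arrays of C strings, so the operators below compare
// those arrays element-wise with strcmp instead of comparing the raw pointers.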
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
std::strong_ordering operator<=>( DeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) return cmp;
if ( auto cmp = queueCreateInfoCount <=> rhs.queueCreateInfoCount; cmp != 0 ) return cmp;
if ( auto cmp = pQueueCreateInfos <=> rhs.pQueueCreateInfos; cmp != 0 ) return cmp;
if ( auto cmp = enabledLayerCount <=> rhs.enabledLayerCount; cmp != 0 ) return cmp;
for ( size_t i = 0; i < enabledLayerCount; ++i )
{
if ( ppEnabledLayerNames[i] != rhs.ppEnabledLayerNames[i] )
if ( auto cmp = strcmp( ppEnabledLayerNames[i], rhs.ppEnabledLayerNames[i] ); cmp != 0 )
return cmp < 0 ? std::strong_ordering::less : std::strong_ordering::greater;
}
if ( auto cmp = enabledExtensionCount <=> rhs.enabledExtensionCount; cmp != 0 ) return cmp;
for ( size_t i = 0; i < enabledExtensionCount; ++i )
{
if ( ppEnabledExtensionNames[i] != rhs.ppEnabledExtensionNames[i] )
if ( auto cmp = strcmp( ppEnabledExtensionNames[i], rhs.ppEnabledExtensionNames[i] ); cmp != 0 )
return cmp < 0 ? std::strong_ordering::less : std::strong_ordering::greater;
}
if ( auto cmp = pEnabledFeatures <=> rhs.pEnabledFeatures; cmp != 0 ) return cmp;
return std::strong_ordering::equivalent;
}
#endif
bool operator==( DeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( queueCreateInfoCount == rhs.queueCreateInfoCount )
&& ( pQueueCreateInfos == rhs.pQueueCreateInfos )
&& ( enabledLayerCount == rhs.enabledLayerCount )
&& std::equal( ppEnabledLayerNames, ppEnabledLayerNames + enabledLayerCount, rhs.ppEnabledLayerNames, []( char const * left, char const * right ) { return ( left == right ) || ( strcmp( left, right ) == 0 ); } )
&& ( enabledExtensionCount == rhs.enabledExtensionCount )
&& std::equal( ppEnabledExtensionNames, ppEnabledExtensionNames + enabledExtensionCount, rhs.ppEnabledExtensionNames, []( char const * left, char const * right ) { return ( left == right ) || ( strcmp( left, right ) == 0 ); } )
&& ( pEnabledFeatures == rhs.pEnabledFeatures );
}
bool operator!=( DeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceCreateInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags = {};
uint32_t queueCreateInfoCount = {};
const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo * pQueueCreateInfos = {};
uint32_t enabledLayerCount = {};
const char * const * ppEnabledLayerNames = {};
uint32_t enabledExtensionCount = {};
const char * const * ppEnabledExtensionNames = {};
const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pEnabledFeatures = {};
};
template <>
struct CppType<StructureType, StructureType::eDeviceCreateInfo>
{
using Type = DeviceCreateInfo;
};
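// Usage sketch (illustrative only; assumes valid `physicalDevice` and `enabledFeatures` objects, <array>
// for std::array, and the enhanced-mode ArrayProxy constructor above, which derives the counts and
// pointers from the proxies):
//
//   float priority = 1.0f;
//   vk::DeviceQueueCreateInfo queueInfo( {}, /*queueFamilyIndex=*/0, /*queueCount=*/1, &priority );
//   std::array<char const *, 1> extensions{ VK_KHR_SWAPCHAIN_EXTENSION_NAME };
//   vk::DeviceCreateInfo createInfo( {}, queueInfo, /*layers=*/{}, extensions, &enabledFeatures );
//   vk::Device device = physicalDevice.createDevice( createInfo );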
// wrapper struct for struct VkDeviceMemoryReportCallbackDataEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceMemoryReportCallbackDataEXT.html
struct DeviceMemoryReportCallbackDataEXT
{
using NativeType = VkDeviceMemoryReportCallbackDataEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceMemoryReportCallbackDataEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DeviceMemoryReportCallbackDataEXT(VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags_ = {}, VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT type_ = VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT::eAllocate, uint64_t memoryObjectId_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown, uint64_t objectHandle_ = {}, uint32_t heapIndex_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, type{ type_ }, memoryObjectId{ memoryObjectId_ }, size{ size_ }, objectType{ objectType_ }, objectHandle{ objectHandle_ }, heapIndex{ heapIndex_ }
{}
VULKAN_HPP_CONSTEXPR DeviceMemoryReportCallbackDataEXT( DeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DeviceMemoryReportCallbackDataEXT( VkDeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: DeviceMemoryReportCallbackDataEXT( *reinterpret_cast<DeviceMemoryReportCallbackDataEXT const *>( &rhs ) )
{}
DeviceMemoryReportCallbackDataEXT & operator=( DeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DeviceMemoryReportCallbackDataEXT & operator=( VkDeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceMemoryReportCallbackDataEXT const *>( &rhs );
return *this;
}
operator VkDeviceMemoryReportCallbackDataEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceMemoryReportCallbackDataEXT*>( this );
}
operator VkDeviceMemoryReportCallbackDataEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceMemoryReportCallbackDataEXT*>( this );
}
operator VkDeviceMemoryReportCallbackDataEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDeviceMemoryReportCallbackDataEXT*>( this );
}
operator VkDeviceMemoryReportCallbackDataEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDeviceMemoryReportCallbackDataEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT const &, VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT const &, uint64_t const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::ObjectType const &, uint64_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, type, memoryObjectId, size, objectType, objectHandle, heapIndex );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DeviceMemoryReportCallbackDataEXT const & ) const = default;
#else
bool operator==( DeviceMemoryReportCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( type == rhs.type )
&& ( memoryObjectId == rhs.memoryObjectId )
&& ( size == rhs.size )
&& ( objectType == rhs.objectType )
&& ( objectHandle == rhs.objectHandle )
&& ( heapIndex == rhs.heapIndex );
#endif
}
bool operator!=( DeviceMemoryReportCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryReportCallbackDataEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags = {};
VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT type = VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT::eAllocate;
uint64_t memoryObjectId = {};
VULKAN_HPP_NAMESPACE::DeviceSize size = {};
VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown;
uint64_t objectHandle = {};
uint32_t heapIndex = {};
};
template <>
struct CppType<StructureType, StructureType::eDeviceMemoryReportCallbackDataEXT>
{
using Type = DeviceMemoryReportCallbackDataEXT;
};
typedef void (VKAPI_PTR *PFN_DeviceMemoryReportCallbackEXT)( const VULKAN_HPP_NAMESPACE::DeviceMemoryReportCallbackDataEXT * pCallbackData, void * pUserData );
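// Callback sketch (illustrative only; `trackAllocation` is a hypothetical application function): a function
// of this shape can be assigned to the pfnUserCallback member of the create-info struct below:
//
//   VKAPI_ATTR void VKAPI_CALL memoryReportCallback( vk::DeviceMemoryReportCallbackDataEXT const * pCallbackData,
//                                                    void * /*pUserData*/ )
//   {
//     if ( pCallbackData->type == vk::DeviceMemoryReportEventTypeEXT::eAllocate )
//     {
//       trackAllocation( pCallbackData->memoryObjectId, pCallbackData->size );
//     }
//   }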
// wrapper struct for struct VkDeviceDeviceMemoryReportCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceDeviceMemoryReportCreateInfoEXT.html
struct DeviceDeviceMemoryReportCreateInfoEXT
{
using NativeType = VkDeviceDeviceMemoryReportCreateInfoEXT;
static const bool allowDuplicate = true;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceDeviceMemoryReportCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DeviceDeviceMemoryReportCreateInfoEXT(VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags_ = {}, VULKAN_HPP_NAMESPACE::PFN_DeviceMemoryReportCallbackEXT pfnUserCallback_ = {}, void * pUserData_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, pfnUserCallback{ pfnUserCallback_ }, pUserData{ pUserData_ }
{}
VULKAN_HPP_CONSTEXPR DeviceDeviceMemoryReportCreateInfoEXT( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DeviceDeviceMemoryReportCreateInfoEXT( VkDeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: DeviceDeviceMemoryReportCreateInfoEXT( *reinterpret_cast<DeviceDeviceMemoryReportCreateInfoEXT const *>( &rhs ) )
{}
#if defined( __clang__ ) || defined( __GNUC__ )
# pragma GCC diagnostic push
# if defined( __clang__ )
# pragma clang diagnostic ignored "-Wunknown-warning-option"
# endif
# pragma GCC diagnostic ignored "-Wcast-function-type"
#endif
VULKAN_HPP_DEPRECATED( "This constructor is deprecated. Use the one taking function pointer types from the vk-namespace instead." )
DeviceDeviceMemoryReportCreateInfoEXT( VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags_,
PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback_,
void * pUserData_ = {},
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
: DeviceDeviceMemoryReportCreateInfoEXT( flags_, reinterpret_cast<VULKAN_HPP_NAMESPACE::PFN_DeviceMemoryReportCallbackEXT>( pfnUserCallback_ ), pUserData_, pNext_ )
{
}
#if defined( __clang__ ) || defined( __GNUC__ )
# pragma GCC diagnostic pop
#endif
DeviceDeviceMemoryReportCreateInfoEXT & operator=( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DeviceDeviceMemoryReportCreateInfoEXT & operator=( VkDeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceDeviceMemoryReportCreateInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DeviceDeviceMemoryReportCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceDeviceMemoryReportCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceDeviceMemoryReportCreateInfoEXT & setPfnUserCallback( VULKAN_HPP_NAMESPACE::PFN_DeviceMemoryReportCallbackEXT pfnUserCallback_ ) VULKAN_HPP_NOEXCEPT
{
pfnUserCallback = pfnUserCallback_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceDeviceMemoryReportCreateInfoEXT & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT
{
pUserData = pUserData_;
return *this;
}
#if defined( __clang__ ) || defined( __GNUC__ )
# pragma GCC diagnostic push
# if defined( __clang__ )
# pragma clang diagnostic ignored "-Wunknown-warning-option"
# endif
# pragma GCC diagnostic ignored "-Wcast-function-type"
#endif
VULKAN_HPP_DEPRECATED( "This setter is deprecated. Use the one taking a function pointer type from the vk-namespace instead." )
DeviceDeviceMemoryReportCreateInfoEXT & setPfnUserCallback( PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback_ ) VULKAN_HPP_NOEXCEPT
{
return setPfnUserCallback( reinterpret_cast<VULKAN_HPP_NAMESPACE::PFN_DeviceMemoryReportCallbackEXT>( pfnUserCallback_ ) );
}
#if defined( __clang__ ) || defined( __GNUC__ )
# pragma GCC diagnostic pop
#endif
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDeviceDeviceMemoryReportCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceDeviceMemoryReportCreateInfoEXT*>( this );
}
operator VkDeviceDeviceMemoryReportCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceDeviceMemoryReportCreateInfoEXT*>( this );
}
operator VkDeviceDeviceMemoryReportCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDeviceDeviceMemoryReportCreateInfoEXT*>( this );
}
operator VkDeviceDeviceMemoryReportCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDeviceDeviceMemoryReportCreateInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT const &, VULKAN_HPP_NAMESPACE::PFN_DeviceMemoryReportCallbackEXT const &, void * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, pfnUserCallback, pUserData );
}
#endif
bool operator==( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( pfnUserCallback == rhs.pfnUserCallback )
&& ( pUserData == rhs.pUserData );
#endif
}
bool operator!=( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceDeviceMemoryReportCreateInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags = {};
VULKAN_HPP_NAMESPACE::PFN_DeviceMemoryReportCallbackEXT pfnUserCallback = {};
void * pUserData = {};
};
template <>
struct CppType<StructureType, StructureType::eDeviceDeviceMemoryReportCreateInfoEXT>
{
using Type = DeviceDeviceMemoryReportCreateInfoEXT;
};
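  // Usage sketch (illustrative; hypothetical macro and example* names as above): chain the create info
  // into DeviceCreateInfo::pNext before device creation. Since allowDuplicate is true, several of these
  // structs (one per callback) may appear in the same chain; the caller keeps reportInfo alive.
#if defined( VULKAN_HPP_ENABLE_USAGE_SKETCHES )
  inline void exampleChainMemoryReport( VULKAN_HPP_NAMESPACE::DeviceCreateInfo & deviceCreateInfo,
                                        VULKAN_HPP_NAMESPACE::DeviceDeviceMemoryReportCreateInfoEXT & reportInfo,
                                        VULKAN_HPP_NAMESPACE::PFN_DeviceMemoryReportCallbackEXT callback,
                                        void * userData )
  {
    reportInfo.setPfnUserCallback( callback ).setPUserData( userData );
    reportInfo.pNext       = deviceCreateInfo.pNext;  // preserve any existing chain
    deviceCreateInfo.pNext = &reportInfo;
  }
#endif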
// wrapper struct for struct VkDeviceDiagnosticsConfigCreateInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceDiagnosticsConfigCreateInfoNV.html
struct DeviceDiagnosticsConfigCreateInfoNV
{
using NativeType = VkDeviceDiagnosticsConfigCreateInfoNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceDiagnosticsConfigCreateInfoNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigCreateInfoNV(VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }
{}
VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigCreateInfoNV( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DeviceDiagnosticsConfigCreateInfoNV( VkDeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: DeviceDiagnosticsConfigCreateInfoNV( *reinterpret_cast<DeviceDiagnosticsConfigCreateInfoNV const *>( &rhs ) )
{}
DeviceDiagnosticsConfigCreateInfoNV & operator=( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DeviceDiagnosticsConfigCreateInfoNV & operator=( VkDeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigCreateInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DeviceDiagnosticsConfigCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceDiagnosticsConfigCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDeviceDiagnosticsConfigCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceDiagnosticsConfigCreateInfoNV*>( this );
}
operator VkDeviceDiagnosticsConfigCreateInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceDiagnosticsConfigCreateInfoNV*>( this );
}
operator VkDeviceDiagnosticsConfigCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDeviceDiagnosticsConfigCreateInfoNV*>( this );
}
operator VkDeviceDiagnosticsConfigCreateInfoNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDeviceDiagnosticsConfigCreateInfoNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DeviceDiagnosticsConfigCreateInfoNV const & ) const = default;
#else
bool operator==( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags );
#endif
}
bool operator!=( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceDiagnosticsConfigCreateInfoNV;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags = {};
};
template <>
struct CppType<StructureType, StructureType::eDeviceDiagnosticsConfigCreateInfoNV>
{
using Type = DeviceDiagnosticsConfigCreateInfoNV;
};
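  // Usage sketch (illustrative; hypothetical macro and example* names as above): request NVIDIA device
  // diagnostics by chaining this struct into DeviceCreateInfo::pNext at device creation.
#if defined( VULKAN_HPP_ENABLE_USAGE_SKETCHES )
  inline void exampleChainDiagnosticsConfig( VULKAN_HPP_NAMESPACE::DeviceCreateInfo & deviceCreateInfo,
                                             VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigCreateInfoNV & diagnosticsConfig )
  {
    diagnosticsConfig.setFlags( VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderDebugInfo |
                                VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagBitsNV::eEnableResourceTracking );
    diagnosticsConfig.pNext = deviceCreateInfo.pNext;  // preserve any existing chain
    deviceCreateInfo.pNext  = &diagnosticsConfig;      // caller keeps diagnosticsConfig alive
  }
#endif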
// wrapper struct for struct VkDeviceEventInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceEventInfoEXT.html
struct DeviceEventInfoEXT
{
using NativeType = VkDeviceEventInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceEventInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DeviceEventInfoEXT(VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent_ = VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT::eDisplayHotplug, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, deviceEvent{ deviceEvent_ }
{}
VULKAN_HPP_CONSTEXPR DeviceEventInfoEXT( DeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DeviceEventInfoEXT( VkDeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: DeviceEventInfoEXT( *reinterpret_cast<DeviceEventInfoEXT const *>( &rhs ) )
{}
DeviceEventInfoEXT & operator=( DeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DeviceEventInfoEXT & operator=( VkDeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DeviceEventInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceEventInfoEXT & setDeviceEvent( VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent_ ) VULKAN_HPP_NOEXCEPT
{
deviceEvent = deviceEvent_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDeviceEventInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceEventInfoEXT*>( this );
}
operator VkDeviceEventInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceEventInfoEXT*>( this );
}
operator VkDeviceEventInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDeviceEventInfoEXT*>( this );
}
operator VkDeviceEventInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDeviceEventInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, deviceEvent );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DeviceEventInfoEXT const & ) const = default;
#else
bool operator==( DeviceEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( deviceEvent == rhs.deviceEvent );
#endif
}
bool operator!=( DeviceEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceEventInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent = VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT::eDisplayHotplug;
};
template <>
struct CppType<StructureType, StructureType::eDeviceEventInfoEXT>
{
using Type = DeviceEventInfoEXT;
};
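  // Usage sketch (illustrative; hypothetical macro and example* names as above; the default,
  // exception-enabled configuration is assumed): registerEventEXT wraps vkRegisterDeviceEventEXT and
  // yields a fence that signals when the requested device event occurs.
#if defined( VULKAN_HPP_ENABLE_USAGE_SKETCHES )
  inline VULKAN_HPP_NAMESPACE::Fence exampleRegisterHotplugFence( VULKAN_HPP_NAMESPACE::Device device )
  {
    VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT eventInfo{ VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT::eDisplayHotplug };
    return device.registerEventEXT( eventInfo );
  }
#endif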
// wrapper struct for struct VkDeviceFaultAddressInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceFaultAddressInfoEXT.html
struct DeviceFaultAddressInfoEXT
{
using NativeType = VkDeviceFaultAddressInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DeviceFaultAddressInfoEXT(VULKAN_HPP_NAMESPACE::DeviceFaultAddressTypeEXT addressType_ = VULKAN_HPP_NAMESPACE::DeviceFaultAddressTypeEXT::eNone, VULKAN_HPP_NAMESPACE::DeviceAddress reportedAddress_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize addressPrecision_ = {}) VULKAN_HPP_NOEXCEPT
: addressType{ addressType_ }, reportedAddress{ reportedAddress_ }, addressPrecision{ addressPrecision_ }
{}
VULKAN_HPP_CONSTEXPR DeviceFaultAddressInfoEXT( DeviceFaultAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DeviceFaultAddressInfoEXT( VkDeviceFaultAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: DeviceFaultAddressInfoEXT( *reinterpret_cast<DeviceFaultAddressInfoEXT const *>( &rhs ) )
{}
DeviceFaultAddressInfoEXT & operator=( DeviceFaultAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DeviceFaultAddressInfoEXT & operator=( VkDeviceFaultAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DeviceFaultAddressInfoEXT & setAddressType( VULKAN_HPP_NAMESPACE::DeviceFaultAddressTypeEXT addressType_ ) VULKAN_HPP_NOEXCEPT
{
addressType = addressType_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceFaultAddressInfoEXT & setReportedAddress( VULKAN_HPP_NAMESPACE::DeviceAddress reportedAddress_ ) VULKAN_HPP_NOEXCEPT
{
reportedAddress = reportedAddress_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceFaultAddressInfoEXT & setAddressPrecision( VULKAN_HPP_NAMESPACE::DeviceSize addressPrecision_ ) VULKAN_HPP_NOEXCEPT
{
addressPrecision = addressPrecision_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDeviceFaultAddressInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceFaultAddressInfoEXT*>( this );
}
operator VkDeviceFaultAddressInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceFaultAddressInfoEXT*>( this );
}
operator VkDeviceFaultAddressInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDeviceFaultAddressInfoEXT*>( this );
}
operator VkDeviceFaultAddressInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDeviceFaultAddressInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::DeviceFaultAddressTypeEXT const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( addressType, reportedAddress, addressPrecision );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DeviceFaultAddressInfoEXT const & ) const = default;
#else
bool operator==( DeviceFaultAddressInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( addressType == rhs.addressType )
&& ( reportedAddress == rhs.reportedAddress )
&& ( addressPrecision == rhs.addressPrecision );
#endif
}
bool operator!=( DeviceFaultAddressInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::DeviceFaultAddressTypeEXT addressType = VULKAN_HPP_NAMESPACE::DeviceFaultAddressTypeEXT::eNone;
VULKAN_HPP_NAMESPACE::DeviceAddress reportedAddress = {};
VULKAN_HPP_NAMESPACE::DeviceSize addressPrecision = {};
};
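  // Usage sketch (illustrative; hypothetical macro and example* names as above): per the device fault
  // extension, addressPrecision is a power of two, so the faulting address lies within the inclusive
  // range computed below from reportedAddress.
#if defined( VULKAN_HPP_ENABLE_USAGE_SKETCHES )
  inline void exampleFaultAddressRange( VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT const & info,
                                        VULKAN_HPP_NAMESPACE::DeviceAddress & lower,
                                        VULKAN_HPP_NAMESPACE::DeviceAddress & upper )
  {
    lower = info.reportedAddress & ~( info.addressPrecision - 1 );  // round down to the precision boundary
    upper = info.reportedAddress | ( info.addressPrecision - 1 );   // last byte covered by that boundary
  }
#endif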
// wrapper struct for struct VkDeviceFaultCountsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceFaultCountsEXT.html
struct DeviceFaultCountsEXT
{
using NativeType = VkDeviceFaultCountsEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceFaultCountsEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DeviceFaultCountsEXT(uint32_t addressInfoCount_ = {}, uint32_t vendorInfoCount_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize vendorBinarySize_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, addressInfoCount{ addressInfoCount_ }, vendorInfoCount{ vendorInfoCount_ }, vendorBinarySize{ vendorBinarySize_ }
{}
VULKAN_HPP_CONSTEXPR DeviceFaultCountsEXT( DeviceFaultCountsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DeviceFaultCountsEXT( VkDeviceFaultCountsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: DeviceFaultCountsEXT( *reinterpret_cast<DeviceFaultCountsEXT const *>( &rhs ) )
{}
DeviceFaultCountsEXT & operator=( DeviceFaultCountsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DeviceFaultCountsEXT & operator=( VkDeviceFaultCountsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DeviceFaultCountsEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceFaultCountsEXT & setAddressInfoCount( uint32_t addressInfoCount_ ) VULKAN_HPP_NOEXCEPT
{
addressInfoCount = addressInfoCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceFaultCountsEXT & setVendorInfoCount( uint32_t vendorInfoCount_ ) VULKAN_HPP_NOEXCEPT
{
vendorInfoCount = vendorInfoCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceFaultCountsEXT & setVendorBinarySize( VULKAN_HPP_NAMESPACE::DeviceSize vendorBinarySize_ ) VULKAN_HPP_NOEXCEPT
{
vendorBinarySize = vendorBinarySize_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDeviceFaultCountsEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceFaultCountsEXT*>( this );
}
operator VkDeviceFaultCountsEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceFaultCountsEXT*>( this );
}
operator VkDeviceFaultCountsEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDeviceFaultCountsEXT*>( this );
}
operator VkDeviceFaultCountsEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDeviceFaultCountsEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, addressInfoCount, vendorInfoCount, vendorBinarySize );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DeviceFaultCountsEXT const & ) const = default;
#else
bool operator==( DeviceFaultCountsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( addressInfoCount == rhs.addressInfoCount )
&& ( vendorInfoCount == rhs.vendorInfoCount )
&& ( vendorBinarySize == rhs.vendorBinarySize );
#endif
}
bool operator!=( DeviceFaultCountsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceFaultCountsEXT;
void * pNext = {};
uint32_t addressInfoCount = {};
uint32_t vendorInfoCount = {};
VULKAN_HPP_NAMESPACE::DeviceSize vendorBinarySize = {};
};
template <>
struct CppType<StructureType, StructureType::eDeviceFaultCountsEXT>
{
using Type = DeviceFaultCountsEXT;
};
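  // Usage sketch (illustrative; hypothetical macro and example* names as above; the caller is assumed
  // to have fetched vkGetDeviceFaultInfoEXT via vkGetDeviceProcAddr): passing a null
  // VkDeviceFaultInfoEXT pointer fills in only the element counts and the vendor binary size.
#if defined( VULKAN_HPP_ENABLE_USAGE_SKETCHES )
  inline VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT
    exampleQueryFaultCounts( VkDevice device, PFN_vkGetDeviceFaultInfoEXT pfnGetDeviceFaultInfoEXT )
  {
    VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT faultCounts{};
    pfnGetDeviceFaultInfoEXT( device, faultCounts, nullptr );  // implicit conversion to VkDeviceFaultCountsEXT *
    return faultCounts;
  }
#endif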
// wrapper struct for struct VkDeviceFaultVendorInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceFaultVendorInfoEXT.html
struct DeviceFaultVendorInfoEXT
{
using NativeType = VkDeviceFaultVendorInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorInfoEXT(std::array<char,VK_MAX_DESCRIPTION_SIZE> const & description_ = {}, uint64_t vendorFaultCode_ = {}, uint64_t vendorFaultData_ = {}) VULKAN_HPP_NOEXCEPT
: description{ description_ }, vendorFaultCode{ vendorFaultCode_ }, vendorFaultData{ vendorFaultData_ }
{}
VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorInfoEXT( DeviceFaultVendorInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DeviceFaultVendorInfoEXT( VkDeviceFaultVendorInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: DeviceFaultVendorInfoEXT( *reinterpret_cast<DeviceFaultVendorInfoEXT const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
DeviceFaultVendorInfoEXT( std::string const & description_, uint64_t vendorFaultCode_ = {}, uint64_t vendorFaultData_ = {} )
: vendorFaultCode( vendorFaultCode_ ), vendorFaultData( vendorFaultData_ )
{
      VULKAN_HPP_ASSERT( description_.size() < VK_MAX_DESCRIPTION_SIZE );
      // the member array is zero-initialized, so copying at most VK_MAX_DESCRIPTION_SIZE - 1
      // characters (enforced by the assertion above) leaves the string null-terminated
#if defined( _WIN32 )
      strncpy_s( description, VK_MAX_DESCRIPTION_SIZE, description_.data(), description_.size() );
#else
      strncpy( description, description_.data(), std::min<size_t>( VK_MAX_DESCRIPTION_SIZE, description_.size() ) );
#endif
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
DeviceFaultVendorInfoEXT & operator=( DeviceFaultVendorInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DeviceFaultVendorInfoEXT & operator=( VkDeviceFaultVendorInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorInfoEXT & setDescription( std::array<char,VK_MAX_DESCRIPTION_SIZE> description_ ) VULKAN_HPP_NOEXCEPT
{
description = description_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
DeviceFaultVendorInfoEXT & setDescription( std::string const & description_ ) VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( description_.size() < VK_MAX_DESCRIPTION_SIZE );
#if defined( _WIN32 )
strncpy_s( description, VK_MAX_DESCRIPTION_SIZE, description_.data(), description_.size() );
#else
strncpy( description, description_.data(), std::min<size_t>( VK_MAX_DESCRIPTION_SIZE, description_.size() ) );
#endif
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorInfoEXT & setVendorFaultCode( uint64_t vendorFaultCode_ ) VULKAN_HPP_NOEXCEPT
{
vendorFaultCode = vendorFaultCode_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorInfoEXT & setVendorFaultData( uint64_t vendorFaultData_ ) VULKAN_HPP_NOEXCEPT
{
vendorFaultData = vendorFaultData_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDeviceFaultVendorInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceFaultVendorInfoEXT*>( this );
}
operator VkDeviceFaultVendorInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceFaultVendorInfoEXT*>( this );
}
operator VkDeviceFaultVendorInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDeviceFaultVendorInfoEXT*>( this );
}
operator VkDeviceFaultVendorInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDeviceFaultVendorInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &, uint64_t const &, uint64_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( description, vendorFaultCode, vendorFaultData );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
std::strong_ordering operator<=>( DeviceFaultVendorInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = strcmp( description, rhs.description ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
if ( auto cmp = vendorFaultCode <=> rhs.vendorFaultCode; cmp != 0 ) return cmp;
if ( auto cmp = vendorFaultData <=> rhs.vendorFaultData; cmp != 0 ) return cmp;
return std::strong_ordering::equivalent;
}
#endif
bool operator==( DeviceFaultVendorInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( strcmp( description, rhs.description ) == 0 )
&& ( vendorFaultCode == rhs.vendorFaultCode )
&& ( vendorFaultData == rhs.vendorFaultData );
}
bool operator!=( DeviceFaultVendorInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
uint64_t vendorFaultCode = {};
uint64_t vendorFaultData = {};
};
// wrapper struct for struct VkDeviceFaultInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceFaultInfoEXT.html
struct DeviceFaultInfoEXT
{
using NativeType = VkDeviceFaultInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceFaultInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 DeviceFaultInfoEXT( std::array<char, VK_MAX_DESCRIPTION_SIZE> const & description_ = {},
VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT * pAddressInfos_ = {},
VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT * pVendorInfos_ = {},
void * pVendorBinaryData_ = {},
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }
, description{ description_ }
, pAddressInfos{ pAddressInfos_ }
, pVendorInfos{ pVendorInfos_ }
, pVendorBinaryData{ pVendorBinaryData_ }
{
}
# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
VULKAN_HPP_CONSTEXPR_14 DeviceFaultInfoEXT( DeviceFaultInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DeviceFaultInfoEXT( VkDeviceFaultInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DeviceFaultInfoEXT( *reinterpret_cast<DeviceFaultInfoEXT const *>( &rhs ) ) {}
DeviceFaultInfoEXT & operator=( DeviceFaultInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
# else
    // in enhanced mode this struct owns pAddressInfos, pVendorInfos and pVendorBinaryData (expected to
    // be malloc-allocated, as they are by the enhanced-mode Device::getFaultInfoEXT) and free()s them
    // on destruction; copying is therefore deleted and only moves are allowed
    DeviceFaultInfoEXT( DeviceFaultInfoEXT const & ) = delete;
    DeviceFaultInfoEXT & operator=( DeviceFaultInfoEXT const & ) = delete;
DeviceFaultInfoEXT( DeviceFaultInfoEXT && rhs ) VULKAN_HPP_NOEXCEPT
: pNext{ rhs.pNext }
, pAddressInfos{ rhs.pAddressInfos }
, pVendorInfos{ rhs.pVendorInfos }
, pVendorBinaryData{ rhs.pVendorBinaryData }
{
memcpy( description, rhs.description, VK_MAX_DESCRIPTION_SIZE );
rhs.pNext = nullptr;
memset( rhs.description, 0, VK_MAX_DESCRIPTION_SIZE );
rhs.pAddressInfos = nullptr;
rhs.pVendorInfos = nullptr;
rhs.pVendorBinaryData = nullptr;
}
DeviceFaultInfoEXT & operator=( DeviceFaultInfoEXT && rhs ) VULKAN_HPP_NOEXCEPT
{
free( pAddressInfos );
free( pVendorInfos );
free( pVendorBinaryData );
pNext = rhs.pNext;
memcpy( description, rhs.description, VK_MAX_DESCRIPTION_SIZE );
pAddressInfos = rhs.pAddressInfos;
pVendorInfos = rhs.pVendorInfos;
pVendorBinaryData = rhs.pVendorBinaryData;
rhs.pNext = nullptr;
memset( rhs.description, 0, VK_MAX_DESCRIPTION_SIZE );
rhs.pAddressInfos = nullptr;
rhs.pVendorInfos = nullptr;
rhs.pVendorBinaryData = nullptr;
return *this;
}
~DeviceFaultInfoEXT() VULKAN_HPP_NOEXCEPT
{
free( pAddressInfos );
free( pVendorInfos );
free( pVendorBinaryData );
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
operator VkDeviceFaultInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceFaultInfoEXT*>( this );
}
operator VkDeviceFaultInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceFaultInfoEXT*>( this );
}
operator VkDeviceFaultInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDeviceFaultInfoEXT*>( this );
}
operator VkDeviceFaultInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDeviceFaultInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &, VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT * const &, VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT * const &, void * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, description, pAddressInfos, pVendorInfos, pVendorBinaryData );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
std::strong_ordering operator<=>( DeviceFaultInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
if ( auto cmp = strcmp( description, rhs.description ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
if ( auto cmp = pAddressInfos <=> rhs.pAddressInfos; cmp != 0 ) return cmp;
if ( auto cmp = pVendorInfos <=> rhs.pVendorInfos; cmp != 0 ) return cmp;
if ( auto cmp = pVendorBinaryData <=> rhs.pVendorBinaryData; cmp != 0 ) return cmp;
return std::strong_ordering::equivalent;
}
#endif
bool operator==( DeviceFaultInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( strcmp( description, rhs.description ) == 0 )
&& ( pAddressInfos == rhs.pAddressInfos )
&& ( pVendorInfos == rhs.pVendorInfos )
&& ( pVendorBinaryData == rhs.pVendorBinaryData );
}
bool operator!=( DeviceFaultInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceFaultInfoEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT * pAddressInfos = {};
VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT * pVendorInfos = {};
void * pVendorBinaryData = {};
};
template <>
struct CppType<StructureType, StructureType::eDeviceFaultInfoEXT>
{
using Type = DeviceFaultInfoEXT;
};
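  // Usage sketch (illustrative; hypothetical macro and example* names as above): the classic two-call
  // pattern. The arrays must be malloc-allocated, because the enhanced-mode wrapper free()s them when
  // it is destroyed at the end of the function.
#if defined( VULKAN_HPP_ENABLE_USAGE_SKETCHES )
  inline void exampleQueryFaultInfo( VkDevice device, PFN_vkGetDeviceFaultInfoEXT pfnGetDeviceFaultInfoEXT )
  {
    VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT faultCounts{};
    pfnGetDeviceFaultInfoEXT( device, faultCounts, nullptr );  // first call: counts only
    VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT faultInfo;
    faultInfo.pAddressInfos = static_cast<VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT *>(
      malloc( faultCounts.addressInfoCount * sizeof( VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT ) ) );
    faultInfo.pVendorInfos = static_cast<VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT *>(
      malloc( faultCounts.vendorInfoCount * sizeof( VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT ) ) );
    pfnGetDeviceFaultInfoEXT( device, faultCounts, faultInfo );  // second call: fills description and arrays
    // faultInfo.description now holds a human-readable summary; ~DeviceFaultInfoEXT releases the arrays
  }
#endif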
// wrapper struct for struct VkDeviceFaultVendorBinaryHeaderVersionOneEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceFaultVendorBinaryHeaderVersionOneEXT.html
struct DeviceFaultVendorBinaryHeaderVersionOneEXT
{
using NativeType = VkDeviceFaultVendorBinaryHeaderVersionOneEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT(
      uint32_t headerSize_ = {},
      VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionEXT headerVersion_ = VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionEXT::eOne,
      uint32_t vendorID_ = {},
      uint32_t deviceID_ = {},
      uint32_t driverVersion_ = {},
      std::array<uint8_t, VK_UUID_SIZE> const & pipelineCacheUUID_ = {},
      uint32_t applicationNameOffset_ = {},
      uint32_t applicationVersion_ = {},
      uint32_t engineNameOffset_ = {},
      uint32_t engineVersion_ = {},
      uint32_t apiVersion_ = {} ) VULKAN_HPP_NOEXCEPT
      : headerSize{ headerSize_ }
      , headerVersion{ headerVersion_ }
      , vendorID{ vendorID_ }
      , deviceID{ deviceID_ }
      , driverVersion{ driverVersion_ }
      , pipelineCacheUUID{ pipelineCacheUUID_ }
      , applicationNameOffset{ applicationNameOffset_ }
      , applicationVersion{ applicationVersion_ }
      , engineNameOffset{ engineNameOffset_ }
      , engineVersion{ engineVersion_ }
      , apiVersion{ apiVersion_ }
    {}
VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT( DeviceFaultVendorBinaryHeaderVersionOneEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DeviceFaultVendorBinaryHeaderVersionOneEXT( VkDeviceFaultVendorBinaryHeaderVersionOneEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: DeviceFaultVendorBinaryHeaderVersionOneEXT( *reinterpret_cast<DeviceFaultVendorBinaryHeaderVersionOneEXT const *>( &rhs ) )
{}
DeviceFaultVendorBinaryHeaderVersionOneEXT & operator=( DeviceFaultVendorBinaryHeaderVersionOneEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DeviceFaultVendorBinaryHeaderVersionOneEXT & operator=( VkDeviceFaultVendorBinaryHeaderVersionOneEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionOneEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setHeaderSize( uint32_t headerSize_ ) VULKAN_HPP_NOEXCEPT
{
headerSize = headerSize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setHeaderVersion( VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionEXT headerVersion_ ) VULKAN_HPP_NOEXCEPT
{
headerVersion = headerVersion_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setVendorID( uint32_t vendorID_ ) VULKAN_HPP_NOEXCEPT
{
vendorID = vendorID_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setDeviceID( uint32_t deviceID_ ) VULKAN_HPP_NOEXCEPT
{
deviceID = deviceID_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setDriverVersion( uint32_t driverVersion_ ) VULKAN_HPP_NOEXCEPT
{
driverVersion = driverVersion_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setPipelineCacheUUID( std::array<uint8_t,VK_UUID_SIZE> pipelineCacheUUID_ ) VULKAN_HPP_NOEXCEPT
{
pipelineCacheUUID = pipelineCacheUUID_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setApplicationNameOffset( uint32_t applicationNameOffset_ ) VULKAN_HPP_NOEXCEPT
{
applicationNameOffset = applicationNameOffset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setApplicationVersion( uint32_t applicationVersion_ ) VULKAN_HPP_NOEXCEPT
{
applicationVersion = applicationVersion_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setEngineNameOffset( uint32_t engineNameOffset_ ) VULKAN_HPP_NOEXCEPT
{
engineNameOffset = engineNameOffset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setEngineVersion( uint32_t engineVersion_ ) VULKAN_HPP_NOEXCEPT
{
engineVersion = engineVersion_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setApiVersion( uint32_t apiVersion_ ) VULKAN_HPP_NOEXCEPT
{
apiVersion = apiVersion_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDeviceFaultVendorBinaryHeaderVersionOneEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceFaultVendorBinaryHeaderVersionOneEXT*>( this );
}
operator VkDeviceFaultVendorBinaryHeaderVersionOneEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceFaultVendorBinaryHeaderVersionOneEXT*>( this );
}
operator VkDeviceFaultVendorBinaryHeaderVersionOneEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDeviceFaultVendorBinaryHeaderVersionOneEXT*>( this );
}
operator VkDeviceFaultVendorBinaryHeaderVersionOneEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDeviceFaultVendorBinaryHeaderVersionOneEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<uint32_t const &, VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionEXT const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( headerSize, headerVersion, vendorID, deviceID, driverVersion, pipelineCacheUUID, applicationNameOffset, applicationVersion, engineNameOffset, engineVersion, apiVersion );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DeviceFaultVendorBinaryHeaderVersionOneEXT const & ) const = default;
#else
bool operator==( DeviceFaultVendorBinaryHeaderVersionOneEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( headerSize == rhs.headerSize )
&& ( headerVersion == rhs.headerVersion )
&& ( vendorID == rhs.vendorID )
&& ( deviceID == rhs.deviceID )
&& ( driverVersion == rhs.driverVersion )
&& ( pipelineCacheUUID == rhs.pipelineCacheUUID )
&& ( applicationNameOffset == rhs.applicationNameOffset )
&& ( applicationVersion == rhs.applicationVersion )
&& ( engineNameOffset == rhs.engineNameOffset )
&& ( engineVersion == rhs.engineVersion )
&& ( apiVersion == rhs.apiVersion );
#endif
}
bool operator!=( DeviceFaultVendorBinaryHeaderVersionOneEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
uint32_t headerSize = {};
VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionEXT headerVersion = VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionEXT::eOne;
uint32_t vendorID = {};
uint32_t deviceID = {};
uint32_t driverVersion = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> pipelineCacheUUID = {};
uint32_t applicationNameOffset = {};
uint32_t applicationVersion = {};
uint32_t engineNameOffset = {};
uint32_t engineVersion = {};
uint32_t apiVersion = {};
};
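  // Usage sketch (illustrative; hypothetical macro and example* names as above): a vendor binary crash
  // dump is specified to begin with this header, so it can be copied straight out of the blob returned
  // through DeviceFaultInfoEXT::pVendorBinaryData.
#if defined( VULKAN_HPP_ENABLE_USAGE_SKETCHES )
  inline bool exampleReadVendorBinaryHeader( void const * pVendorBinaryData,
                                             VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionOneEXT & header )
  {
    memcpy( &header, pVendorBinaryData, sizeof( header ) );
    // only trust the remaining fields if the version matches this layout; headerSize gives the real size
    return header.headerVersion == VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionEXT::eOne;
  }
#endif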
// wrapper struct for struct VkDeviceGroupBindSparseInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceGroupBindSparseInfo.html
struct DeviceGroupBindSparseInfo
{
using NativeType = VkDeviceGroupBindSparseInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupBindSparseInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DeviceGroupBindSparseInfo(uint32_t resourceDeviceIndex_ = {}, uint32_t memoryDeviceIndex_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, resourceDeviceIndex{ resourceDeviceIndex_ }, memoryDeviceIndex{ memoryDeviceIndex_ }
{}
VULKAN_HPP_CONSTEXPR DeviceGroupBindSparseInfo( DeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DeviceGroupBindSparseInfo( VkDeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: DeviceGroupBindSparseInfo( *reinterpret_cast<DeviceGroupBindSparseInfo const *>( &rhs ) )
{}
DeviceGroupBindSparseInfo & operator=( DeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DeviceGroupBindSparseInfo & operator=( VkDeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DeviceGroupBindSparseInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceGroupBindSparseInfo & setResourceDeviceIndex( uint32_t resourceDeviceIndex_ ) VULKAN_HPP_NOEXCEPT
{
resourceDeviceIndex = resourceDeviceIndex_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceGroupBindSparseInfo & setMemoryDeviceIndex( uint32_t memoryDeviceIndex_ ) VULKAN_HPP_NOEXCEPT
{
memoryDeviceIndex = memoryDeviceIndex_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDeviceGroupBindSparseInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceGroupBindSparseInfo*>( this );
}
operator VkDeviceGroupBindSparseInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceGroupBindSparseInfo*>( this );
}
operator VkDeviceGroupBindSparseInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDeviceGroupBindSparseInfo*>( this );
}
operator VkDeviceGroupBindSparseInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDeviceGroupBindSparseInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, resourceDeviceIndex, memoryDeviceIndex );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DeviceGroupBindSparseInfo const & ) const = default;
#else
bool operator==( DeviceGroupBindSparseInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( resourceDeviceIndex == rhs.resourceDeviceIndex )
&& ( memoryDeviceIndex == rhs.memoryDeviceIndex );
#endif
}
bool operator!=( DeviceGroupBindSparseInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupBindSparseInfo;
const void * pNext = {};
uint32_t resourceDeviceIndex = {};
uint32_t memoryDeviceIndex = {};
};
template <>
struct CppType<StructureType, StructureType::eDeviceGroupBindSparseInfo>
{
using Type = DeviceGroupBindSparseInfo;
};
using DeviceGroupBindSparseInfoKHR = DeviceGroupBindSparseInfo;
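  // Usage sketch (illustrative; hypothetical macro and example* names as above): route a sparse
  // binding operation within a device group by chaining this struct into BindSparseInfo.
#if defined( VULKAN_HPP_ENABLE_USAGE_SKETCHES )
  inline void exampleChainGroupBindSparse( VULKAN_HPP_NAMESPACE::BindSparseInfo & bindSparseInfo,
                                           VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo & groupInfo )
  {
    // resourceDeviceIndex picks the device whose resource is bound; memoryDeviceIndex picks the device
    // whose memory backs it (index 0 on both is the common single-instance case)
    groupInfo.setResourceDeviceIndex( 0 ).setMemoryDeviceIndex( 0 );
    groupInfo.pNext      = bindSparseInfo.pNext;  // preserve any existing chain
    bindSparseInfo.pNext = &groupInfo;            // caller keeps groupInfo alive until the bind completes
  }
#endif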
// wrapper struct for struct VkDeviceGroupCommandBufferBeginInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceGroupCommandBufferBeginInfo.html
struct DeviceGroupCommandBufferBeginInfo
{
using NativeType = VkDeviceGroupCommandBufferBeginInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupCommandBufferBeginInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DeviceGroupCommandBufferBeginInfo(uint32_t deviceMask_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, deviceMask{ deviceMask_ }
{}
VULKAN_HPP_CONSTEXPR DeviceGroupCommandBufferBeginInfo( DeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DeviceGroupCommandBufferBeginInfo( VkDeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: DeviceGroupCommandBufferBeginInfo( *reinterpret_cast<DeviceGroupCommandBufferBeginInfo const *>( &rhs ) )
{}
DeviceGroupCommandBufferBeginInfo & operator=( DeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DeviceGroupCommandBufferBeginInfo & operator=( VkDeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DeviceGroupCommandBufferBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceGroupCommandBufferBeginInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
{
deviceMask = deviceMask_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDeviceGroupCommandBufferBeginInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceGroupCommandBufferBeginInfo*>( this );
}
operator VkDeviceGroupCommandBufferBeginInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceGroupCommandBufferBeginInfo*>( this );
}
operator VkDeviceGroupCommandBufferBeginInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDeviceGroupCommandBufferBeginInfo*>( this );
}
operator VkDeviceGroupCommandBufferBeginInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDeviceGroupCommandBufferBeginInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, deviceMask );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DeviceGroupCommandBufferBeginInfo const & ) const = default;
#else
bool operator==( DeviceGroupCommandBufferBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( deviceMask == rhs.deviceMask );
#endif
}
bool operator!=( DeviceGroupCommandBufferBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupCommandBufferBeginInfo;
const void * pNext = {};
uint32_t deviceMask = {};
};
template <>
struct CppType<StructureType, StructureType::eDeviceGroupCommandBufferBeginInfo>
{
using Type = DeviceGroupCommandBufferBeginInfo;
};
using DeviceGroupCommandBufferBeginInfoKHR = DeviceGroupCommandBufferBeginInfo;
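  // Usage sketch (illustrative; hypothetical macro and example* names as above; default,
  // exception-enabled configuration assumed): restrict the commands of a command buffer to devices 0
  // and 1 of the group via the begin-info chain.
#if defined( VULKAN_HPP_ENABLE_USAGE_SKETCHES )
  inline void exampleBeginOnTwoDevices( VULKAN_HPP_NAMESPACE::CommandBuffer commandBuffer )
  {
    VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo groupBegin{ 3u };  // bit i selects device i
    VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo beginInfo{ {}, nullptr, &groupBegin };
    commandBuffer.begin( beginInfo );
  }
#endif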
// wrapper struct for struct VkDeviceGroupDeviceCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceGroupDeviceCreateInfo.html
struct DeviceGroupDeviceCreateInfo
{
using NativeType = VkDeviceGroupDeviceCreateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupDeviceCreateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DeviceGroupDeviceCreateInfo(uint32_t physicalDeviceCount_ = {}, const VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, physicalDeviceCount{ physicalDeviceCount_ }, pPhysicalDevices{ pPhysicalDevices_ }
{}
VULKAN_HPP_CONSTEXPR DeviceGroupDeviceCreateInfo( DeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DeviceGroupDeviceCreateInfo( VkDeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: DeviceGroupDeviceCreateInfo( *reinterpret_cast<DeviceGroupDeviceCreateInfo const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
DeviceGroupDeviceCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PhysicalDevice> const & physicalDevices_, const void * pNext_ = nullptr )
: pNext( pNext_ ), physicalDeviceCount( static_cast<uint32_t>( physicalDevices_.size() ) ), pPhysicalDevices( physicalDevices_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
DeviceGroupDeviceCreateInfo & operator=( DeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DeviceGroupDeviceCreateInfo & operator=( VkDeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DeviceGroupDeviceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceGroupDeviceCreateInfo & setPhysicalDeviceCount( uint32_t physicalDeviceCount_ ) VULKAN_HPP_NOEXCEPT
{
physicalDeviceCount = physicalDeviceCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceGroupDeviceCreateInfo & setPPhysicalDevices( const VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices_ ) VULKAN_HPP_NOEXCEPT
{
pPhysicalDevices = pPhysicalDevices_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
DeviceGroupDeviceCreateInfo & setPhysicalDevices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PhysicalDevice> const & physicalDevices_ ) VULKAN_HPP_NOEXCEPT
{
physicalDeviceCount = static_cast<uint32_t>( physicalDevices_.size() );
pPhysicalDevices = physicalDevices_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDeviceGroupDeviceCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceGroupDeviceCreateInfo*>( this );
}
operator VkDeviceGroupDeviceCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceGroupDeviceCreateInfo*>( this );
}
operator VkDeviceGroupDeviceCreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDeviceGroupDeviceCreateInfo*>( this );
}
operator VkDeviceGroupDeviceCreateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDeviceGroupDeviceCreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::PhysicalDevice * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, physicalDeviceCount, pPhysicalDevices );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DeviceGroupDeviceCreateInfo const & ) const = default;
#else
bool operator==( DeviceGroupDeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( physicalDeviceCount == rhs.physicalDeviceCount )
&& ( pPhysicalDevices == rhs.pPhysicalDevices );
#endif
}
bool operator!=( DeviceGroupDeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupDeviceCreateInfo;
const void * pNext = {};
uint32_t physicalDeviceCount = {};
const VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices = {};
};
template <>
struct CppType<StructureType, StructureType::eDeviceGroupDeviceCreateInfo>
{
using Type = DeviceGroupDeviceCreateInfo;
};
using DeviceGroupDeviceCreateInfoKHR = DeviceGroupDeviceCreateInfo;
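  // Usage sketch (illustrative; hypothetical macro and example* names as above; default,
  // exception-enabled configuration and at least one device group assumed): build the create info from
  // the first physical device group reported by the instance.
#if defined( VULKAN_HPP_ENABLE_USAGE_SKETCHES )
  inline VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo
    exampleFirstDeviceGroup( VULKAN_HPP_NAMESPACE::Instance instance,
                             VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties & groupProperties )
  {
    // groupProperties is an out-parameter: the returned struct points into it, so it has to outlive
    // the vkCreateDevice call that consumes the create info
    groupProperties = instance.enumeratePhysicalDeviceGroups().front();
    return VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo{ groupProperties.physicalDeviceCount, groupProperties.physicalDevices };
  }
#endif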
// wrapper struct for struct VkDeviceGroupPresentCapabilitiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceGroupPresentCapabilitiesKHR.html
struct DeviceGroupPresentCapabilitiesKHR
{
using NativeType = VkDeviceGroupPresentCapabilitiesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupPresentCapabilitiesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentCapabilitiesKHR(std::array<uint32_t,VK_MAX_DEVICE_GROUP_SIZE> const & presentMask_ = {}, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, presentMask{ presentMask_ }, modes{ modes_ }
{}
VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentCapabilitiesKHR( DeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DeviceGroupPresentCapabilitiesKHR( VkDeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: DeviceGroupPresentCapabilitiesKHR( *reinterpret_cast<DeviceGroupPresentCapabilitiesKHR const *>( &rhs ) )
{}
DeviceGroupPresentCapabilitiesKHR & operator=( DeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DeviceGroupPresentCapabilitiesKHR & operator=( VkDeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR const *>( &rhs );
return *this;
}
operator VkDeviceGroupPresentCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceGroupPresentCapabilitiesKHR*>( this );
}
operator VkDeviceGroupPresentCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR*>( this );
}
operator VkDeviceGroupPresentCapabilitiesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDeviceGroupPresentCapabilitiesKHR*>( this );
}
operator VkDeviceGroupPresentCapabilitiesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, VK_MAX_DEVICE_GROUP_SIZE> const &, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, presentMask, modes );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DeviceGroupPresentCapabilitiesKHR const & ) const = default;
#else
bool operator==( DeviceGroupPresentCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( presentMask == rhs.presentMask )
&& ( modes == rhs.modes );
#endif
}
bool operator!=( DeviceGroupPresentCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupPresentCapabilitiesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, VK_MAX_DEVICE_GROUP_SIZE> presentMask = {};
VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes = {};
};
template <>
struct CppType<StructureType, StructureType::eDeviceGroupPresentCapabilitiesKHR>
{
using Type = DeviceGroupPresentCapabilitiesKHR;
};
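  // Usage sketch (illustrative; hypothetical macro and example* names as above; default,
  // exception-enabled configuration assumed): query the supported device group present modes;
  // presentMask[i] is the mask of devices whose swapchain images device i can present.
#if defined( VULKAN_HPP_ENABLE_USAGE_SKETCHES )
  inline VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR
    exampleGroupPresentModes( VULKAN_HPP_NAMESPACE::Device device )
  {
    VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR capabilities = device.getGroupPresentCapabilitiesKHR();
    return capabilities.modes;
  }
#endif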
// wrapper struct for struct VkDeviceGroupPresentInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceGroupPresentInfoKHR.html
struct DeviceGroupPresentInfoKHR
{
using NativeType = VkDeviceGroupPresentInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupPresentInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DeviceGroupPresentInfoKHR(uint32_t swapchainCount_ = {}, const uint32_t * pDeviceMasks_ = {}, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ = VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, swapchainCount{ swapchainCount_ }, pDeviceMasks{ pDeviceMasks_ }, mode{ mode_ }
{}
VULKAN_HPP_CONSTEXPR DeviceGroupPresentInfoKHR( DeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DeviceGroupPresentInfoKHR( VkDeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: DeviceGroupPresentInfoKHR( *reinterpret_cast<DeviceGroupPresentInfoKHR const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
DeviceGroupPresentInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceMasks_, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ = VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal, const void * pNext_ = nullptr )
: pNext( pNext_ ), swapchainCount( static_cast<uint32_t>( deviceMasks_.size() ) ), pDeviceMasks( deviceMasks_.data() ), mode( mode_ )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
DeviceGroupPresentInfoKHR & operator=( DeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DeviceGroupPresentInfoKHR & operator=( VkDeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupPresentInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentInfoKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT
{
swapchainCount = swapchainCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentInfoKHR & setPDeviceMasks( const uint32_t * pDeviceMasks_ ) VULKAN_HPP_NOEXCEPT
{
pDeviceMasks = pDeviceMasks_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
DeviceGroupPresentInfoKHR & setDeviceMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceMasks_ ) VULKAN_HPP_NOEXCEPT
{
swapchainCount = static_cast<uint32_t>( deviceMasks_.size() );
pDeviceMasks = deviceMasks_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentInfoKHR & setMode( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ ) VULKAN_HPP_NOEXCEPT
{
mode = mode_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDeviceGroupPresentInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceGroupPresentInfoKHR*>( this );
}
operator VkDeviceGroupPresentInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceGroupPresentInfoKHR*>( this );
}
operator VkDeviceGroupPresentInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDeviceGroupPresentInfoKHR*>( this );
}
operator VkDeviceGroupPresentInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDeviceGroupPresentInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const uint32_t * const &, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, swapchainCount, pDeviceMasks, mode );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DeviceGroupPresentInfoKHR const & ) const = default;
#else
bool operator==( DeviceGroupPresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( swapchainCount == rhs.swapchainCount )
&& ( pDeviceMasks == rhs.pDeviceMasks )
&& ( mode == rhs.mode );
#endif
}
bool operator!=( DeviceGroupPresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupPresentInfoKHR;
const void * pNext = {};
uint32_t swapchainCount = {};
const uint32_t * pDeviceMasks = {};
VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode = VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal;
};
template <>
struct CppType<StructureType, StructureType::eDeviceGroupPresentInfoKHR>
{
using Type = DeviceGroupPresentInfoKHR;
};
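// Usage sketch (illustrative): chain this structure into a vk::PresentInfoKHR via pNext to select
// per-swapchain device masks for a device-group present. The mask value and the presentInfo
// variable are assumptions for the example:
//   uint32_t deviceMasks[] = { 0x1 };  // one mask per swapchain passed to the present
//   vk::DeviceGroupPresentInfoKHR deviceGroupPresentInfo = vk::DeviceGroupPresentInfoKHR{}
//     .setDeviceMasks( deviceMasks )   // also sets swapchainCount
//     .setMode( vk::DeviceGroupPresentModeFlagBitsKHR::eLocal );
//   presentInfo.setPNext( &deviceGroupPresentInfo );  // presentInfo: a vk::PresentInfoKHR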
// wrapper struct for struct VkDeviceGroupRenderPassBeginInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceGroupRenderPassBeginInfo.html
struct DeviceGroupRenderPassBeginInfo
{
using NativeType = VkDeviceGroupRenderPassBeginInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupRenderPassBeginInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DeviceGroupRenderPassBeginInfo(uint32_t deviceMask_ = {}, uint32_t deviceRenderAreaCount_ = {}, const VULKAN_HPP_NAMESPACE::Rect2D * pDeviceRenderAreas_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, deviceMask{ deviceMask_ }, deviceRenderAreaCount{ deviceRenderAreaCount_ }, pDeviceRenderAreas{ pDeviceRenderAreas_ }
{}
VULKAN_HPP_CONSTEXPR DeviceGroupRenderPassBeginInfo( DeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DeviceGroupRenderPassBeginInfo( VkDeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: DeviceGroupRenderPassBeginInfo( *reinterpret_cast<DeviceGroupRenderPassBeginInfo const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
DeviceGroupRenderPassBeginInfo( uint32_t deviceMask_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & deviceRenderAreas_, const void * pNext_ = nullptr )
: pNext( pNext_ ), deviceMask( deviceMask_ ), deviceRenderAreaCount( static_cast<uint32_t>( deviceRenderAreas_.size() ) ), pDeviceRenderAreas( deviceRenderAreas_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
DeviceGroupRenderPassBeginInfo & operator=( DeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DeviceGroupRenderPassBeginInfo & operator=( VkDeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupRenderPassBeginInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DeviceGroupRenderPassBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceGroupRenderPassBeginInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
{
deviceMask = deviceMask_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceGroupRenderPassBeginInfo & setDeviceRenderAreaCount( uint32_t deviceRenderAreaCount_ ) VULKAN_HPP_NOEXCEPT
{
deviceRenderAreaCount = deviceRenderAreaCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceGroupRenderPassBeginInfo & setPDeviceRenderAreas( const VULKAN_HPP_NAMESPACE::Rect2D * pDeviceRenderAreas_ ) VULKAN_HPP_NOEXCEPT
{
pDeviceRenderAreas = pDeviceRenderAreas_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
DeviceGroupRenderPassBeginInfo & setDeviceRenderAreas( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & deviceRenderAreas_ ) VULKAN_HPP_NOEXCEPT
{
deviceRenderAreaCount = static_cast<uint32_t>( deviceRenderAreas_.size() );
pDeviceRenderAreas = deviceRenderAreas_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDeviceGroupRenderPassBeginInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceGroupRenderPassBeginInfo*>( this );
}
operator VkDeviceGroupRenderPassBeginInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceGroupRenderPassBeginInfo*>( this );
}
operator VkDeviceGroupRenderPassBeginInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDeviceGroupRenderPassBeginInfo*>( this );
}
operator VkDeviceGroupRenderPassBeginInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDeviceGroupRenderPassBeginInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Rect2D * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, deviceMask, deviceRenderAreaCount, pDeviceRenderAreas );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DeviceGroupRenderPassBeginInfo const & ) const = default;
#else
bool operator==( DeviceGroupRenderPassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( deviceMask == rhs.deviceMask )
&& ( deviceRenderAreaCount == rhs.deviceRenderAreaCount )
&& ( pDeviceRenderAreas == rhs.pDeviceRenderAreas );
#endif
}
bool operator!=( DeviceGroupRenderPassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupRenderPassBeginInfo;
const void * pNext = {};
uint32_t deviceMask = {};
uint32_t deviceRenderAreaCount = {};
const VULKAN_HPP_NAMESPACE::Rect2D * pDeviceRenderAreas = {};
};
template <>
struct CppType<StructureType, StructureType::eDeviceGroupRenderPassBeginInfo>
{
using Type = DeviceGroupRenderPassBeginInfo;
};
using DeviceGroupRenderPassBeginInfoKHR = DeviceGroupRenderPassBeginInfo;
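// Usage sketch (illustrative): chain into a vk::RenderPassBeginInfo to restrict a render pass to
// some physical devices of a device group and give each its own render area. The mask and
// render-area values are assumptions:
//   vk::Rect2D renderAreas[] = { vk::Rect2D{ { 0, 0 }, { 640, 480 } } };
//   vk::DeviceGroupRenderPassBeginInfo deviceGroupBeginInfo = vk::DeviceGroupRenderPassBeginInfo{}
//     .setDeviceMask( 0x1 )
//     .setDeviceRenderAreas( renderAreas );  // also sets deviceRenderAreaCount
//   renderPassBeginInfo.setPNext( &deviceGroupBeginInfo );  // renderPassBeginInfo: a vk::RenderPassBeginInfo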
// wrapper struct for struct VkDeviceGroupSubmitInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceGroupSubmitInfo.html
struct DeviceGroupSubmitInfo
{
using NativeType = VkDeviceGroupSubmitInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupSubmitInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DeviceGroupSubmitInfo(uint32_t waitSemaphoreCount_ = {}, const uint32_t * pWaitSemaphoreDeviceIndices_ = {}, uint32_t commandBufferCount_ = {}, const uint32_t * pCommandBufferDeviceMasks_ = {}, uint32_t signalSemaphoreCount_ = {}, const uint32_t * pSignalSemaphoreDeviceIndices_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, waitSemaphoreCount{ waitSemaphoreCount_ }, pWaitSemaphoreDeviceIndices{ pWaitSemaphoreDeviceIndices_ }, commandBufferCount{ commandBufferCount_ }, pCommandBufferDeviceMasks{ pCommandBufferDeviceMasks_ }, signalSemaphoreCount{ signalSemaphoreCount_ }, pSignalSemaphoreDeviceIndices{ pSignalSemaphoreDeviceIndices_ }
{}
VULKAN_HPP_CONSTEXPR DeviceGroupSubmitInfo( DeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DeviceGroupSubmitInfo( VkDeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: DeviceGroupSubmitInfo( *reinterpret_cast<DeviceGroupSubmitInfo const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
DeviceGroupSubmitInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & waitSemaphoreDeviceIndices_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & commandBufferDeviceMasks_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & signalSemaphoreDeviceIndices_ = {}, const void * pNext_ = nullptr )
: pNext( pNext_ ), waitSemaphoreCount( static_cast<uint32_t>( waitSemaphoreDeviceIndices_.size() ) ), pWaitSemaphoreDeviceIndices( waitSemaphoreDeviceIndices_.data() ), commandBufferCount( static_cast<uint32_t>( commandBufferDeviceMasks_.size() ) ), pCommandBufferDeviceMasks( commandBufferDeviceMasks_.data() ), signalSemaphoreCount( static_cast<uint32_t>( signalSemaphoreDeviceIndices_.size() ) ), pSignalSemaphoreDeviceIndices( signalSemaphoreDeviceIndices_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
DeviceGroupSubmitInfo & operator=( DeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DeviceGroupSubmitInfo & operator=( VkDeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
{
waitSemaphoreCount = waitSemaphoreCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setPWaitSemaphoreDeviceIndices( const uint32_t * pWaitSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
{
pWaitSemaphoreDeviceIndices = pWaitSemaphoreDeviceIndices_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
DeviceGroupSubmitInfo & setWaitSemaphoreDeviceIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & waitSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
{
waitSemaphoreCount = static_cast<uint32_t>( waitSemaphoreDeviceIndices_.size() );
pWaitSemaphoreDeviceIndices = waitSemaphoreDeviceIndices_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT
{
commandBufferCount = commandBufferCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setPCommandBufferDeviceMasks( const uint32_t * pCommandBufferDeviceMasks_ ) VULKAN_HPP_NOEXCEPT
{
pCommandBufferDeviceMasks = pCommandBufferDeviceMasks_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
DeviceGroupSubmitInfo & setCommandBufferDeviceMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & commandBufferDeviceMasks_ ) VULKAN_HPP_NOEXCEPT
{
commandBufferCount = static_cast<uint32_t>( commandBufferDeviceMasks_.size() );
pCommandBufferDeviceMasks = commandBufferDeviceMasks_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
{
signalSemaphoreCount = signalSemaphoreCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setPSignalSemaphoreDeviceIndices( const uint32_t * pSignalSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
{
pSignalSemaphoreDeviceIndices = pSignalSemaphoreDeviceIndices_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
DeviceGroupSubmitInfo & setSignalSemaphoreDeviceIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & signalSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
{
signalSemaphoreCount = static_cast<uint32_t>( signalSemaphoreDeviceIndices_.size() );
pSignalSemaphoreDeviceIndices = signalSemaphoreDeviceIndices_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDeviceGroupSubmitInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceGroupSubmitInfo*>( this );
}
operator VkDeviceGroupSubmitInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceGroupSubmitInfo*>( this );
}
operator VkDeviceGroupSubmitInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDeviceGroupSubmitInfo*>( this );
}
operator VkDeviceGroupSubmitInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDeviceGroupSubmitInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const uint32_t * const &, uint32_t const &, const uint32_t * const &, uint32_t const &, const uint32_t * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, waitSemaphoreCount, pWaitSemaphoreDeviceIndices, commandBufferCount, pCommandBufferDeviceMasks, signalSemaphoreCount, pSignalSemaphoreDeviceIndices );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DeviceGroupSubmitInfo const & ) const = default;
#else
bool operator==( DeviceGroupSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( waitSemaphoreCount == rhs.waitSemaphoreCount )
&& ( pWaitSemaphoreDeviceIndices == rhs.pWaitSemaphoreDeviceIndices )
&& ( commandBufferCount == rhs.commandBufferCount )
&& ( pCommandBufferDeviceMasks == rhs.pCommandBufferDeviceMasks )
&& ( signalSemaphoreCount == rhs.signalSemaphoreCount )
&& ( pSignalSemaphoreDeviceIndices == rhs.pSignalSemaphoreDeviceIndices );
#endif
}
bool operator!=( DeviceGroupSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupSubmitInfo;
const void * pNext = {};
uint32_t waitSemaphoreCount = {};
const uint32_t * pWaitSemaphoreDeviceIndices = {};
uint32_t commandBufferCount = {};
const uint32_t * pCommandBufferDeviceMasks = {};
uint32_t signalSemaphoreCount = {};
const uint32_t * pSignalSemaphoreDeviceIndices = {};
};
template <>
struct CppType<StructureType, StructureType::eDeviceGroupSubmitInfo>
{
using Type = DeviceGroupSubmitInfo;
};
using DeviceGroupSubmitInfoKHR = DeviceGroupSubmitInfo;
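// Usage sketch (illustrative): chain into a vk::SubmitInfo so each command buffer in the
// submission executes on the physical devices selected by its mask. Values are assumptions:
//   uint32_t commandBufferMasks[] = { 0x1 };  // one mask per command buffer in the submit
//   vk::DeviceGroupSubmitInfo deviceGroupSubmitInfo = vk::DeviceGroupSubmitInfo{}
//     .setCommandBufferDeviceMasks( commandBufferMasks );  // also sets commandBufferCount
//   submitInfo.setPNext( &deviceGroupSubmitInfo );  // submitInfo: a vk::SubmitInfo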
// wrapper struct for struct VkDeviceGroupSwapchainCreateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceGroupSwapchainCreateInfoKHR.html
struct DeviceGroupSwapchainCreateInfoKHR
{
using NativeType = VkDeviceGroupSwapchainCreateInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupSwapchainCreateInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DeviceGroupSwapchainCreateInfoKHR(VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, modes{ modes_ }
{}
VULKAN_HPP_CONSTEXPR DeviceGroupSwapchainCreateInfoKHR( DeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DeviceGroupSwapchainCreateInfoKHR( VkDeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: DeviceGroupSwapchainCreateInfoKHR( *reinterpret_cast<DeviceGroupSwapchainCreateInfoKHR const *>( &rhs ) )
{}
DeviceGroupSwapchainCreateInfoKHR & operator=( DeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DeviceGroupSwapchainCreateInfoKHR & operator=( VkDeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DeviceGroupSwapchainCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceGroupSwapchainCreateInfoKHR & setModes( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ ) VULKAN_HPP_NOEXCEPT
{
modes = modes_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDeviceGroupSwapchainCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceGroupSwapchainCreateInfoKHR*>( this );
}
operator VkDeviceGroupSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceGroupSwapchainCreateInfoKHR*>( this );
}
operator VkDeviceGroupSwapchainCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDeviceGroupSwapchainCreateInfoKHR*>( this );
}
operator VkDeviceGroupSwapchainCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDeviceGroupSwapchainCreateInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, modes );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DeviceGroupSwapchainCreateInfoKHR const & ) const = default;
#else
bool operator==( DeviceGroupSwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( modes == rhs.modes );
#endif
}
bool operator!=( DeviceGroupSwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupSwapchainCreateInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes = {};
};
template <>
struct CppType<StructureType, StructureType::eDeviceGroupSwapchainCreateInfoKHR>
{
using Type = DeviceGroupSwapchainCreateInfoKHR;
};
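// Usage sketch (illustrative): chain into a vk::SwapchainCreateInfoKHR to declare which
// device-group present modes the swapchain may be used with:
//   vk::DeviceGroupSwapchainCreateInfoKHR deviceGroupSwapchainInfo = vk::DeviceGroupSwapchainCreateInfoKHR{}
//     .setModes( vk::DeviceGroupPresentModeFlagBitsKHR::eLocal );
//   swapchainCreateInfo.setPNext( &deviceGroupSwapchainInfo );  // swapchainCreateInfo: a vk::SwapchainCreateInfoKHR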
// wrapper struct for struct VkImageCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageCreateInfo.html
struct ImageCreateInfo
{
using NativeType = VkImageCreateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageCreateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImageCreateInfo(VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ImageType imageType_ = VULKAN_HPP_NAMESPACE::ImageType::e1D, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}, uint32_t mipLevels_ = {}, uint32_t arrayLayers_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, uint32_t queueFamilyIndexCount_ = {}, const uint32_t * pQueueFamilyIndices_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, imageType{ imageType_ }, format{ format_ }, extent{ extent_ }, mipLevels{ mipLevels_ }, arrayLayers{ arrayLayers_ }, samples{ samples_ }, tiling{ tiling_ }, usage{ usage_ }, sharingMode{ sharingMode_ }, queueFamilyIndexCount{ queueFamilyIndexCount_ }, pQueueFamilyIndices{ pQueueFamilyIndices_ }, initialLayout{ initialLayout_ }
{}
VULKAN_HPP_CONSTEXPR ImageCreateInfo( ImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageCreateInfo( VkImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageCreateInfo( *reinterpret_cast<ImageCreateInfo const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
ImageCreateInfo( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_, VULKAN_HPP_NAMESPACE::ImageType imageType_, VULKAN_HPP_NAMESPACE::Format format_, VULKAN_HPP_NAMESPACE::Extent3D extent_, uint32_t mipLevels_, uint32_t arrayLayers_, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_, VULKAN_HPP_NAMESPACE::ImageTiling tiling_, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_, VULKAN_HPP_NAMESPACE::SharingMode sharingMode_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_, VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, const void * pNext_ = nullptr )
: pNext( pNext_ ), flags( flags_ ), imageType( imageType_ ), format( format_ ), extent( extent_ ), mipLevels( mipLevels_ ), arrayLayers( arrayLayers_ ), samples( samples_ ), tiling( tiling_ ), usage( usage_ ), sharingMode( sharingMode_ ), queueFamilyIndexCount( static_cast<uint32_t>( queueFamilyIndices_.size() ) ), pQueueFamilyIndices( queueFamilyIndices_.data() ), initialLayout( initialLayout_ )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
ImageCreateInfo & operator=( ImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ImageCreateInfo & operator=( VkImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setImageType( VULKAN_HPP_NAMESPACE::ImageType imageType_ ) VULKAN_HPP_NOEXCEPT
{
imageType = imageType_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
{
format = format_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
{
extent = extent_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setMipLevels( uint32_t mipLevels_ ) VULKAN_HPP_NOEXCEPT
{
mipLevels = mipLevels_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setArrayLayers( uint32_t arrayLayers_ ) VULKAN_HPP_NOEXCEPT
{
arrayLayers = arrayLayers_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT
{
samples = samples_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT
{
tiling = tiling_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
{
usage = usage_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT
{
sharingMode = sharingMode_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT
{
queueFamilyIndexCount = queueFamilyIndexCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
{
pQueueFamilyIndices = pQueueFamilyIndices_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
ImageCreateInfo & setQueueFamilyIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
{
queueFamilyIndexCount = static_cast<uint32_t>( queueFamilyIndices_.size() );
pQueueFamilyIndices = queueFamilyIndices_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ ) VULKAN_HPP_NOEXCEPT
{
initialLayout = initialLayout_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkImageCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageCreateInfo*>( this );
}
operator VkImageCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageCreateInfo*>( this );
}
operator VkImageCreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkImageCreateInfo*>( this );
}
operator VkImageCreateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkImageCreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageCreateFlags const &, VULKAN_HPP_NAMESPACE::ImageType const &, VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::Extent3D const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::SampleCountFlagBits const &, VULKAN_HPP_NAMESPACE::ImageTiling const &, VULKAN_HPP_NAMESPACE::ImageUsageFlags const &, VULKAN_HPP_NAMESPACE::SharingMode const &, uint32_t const &, const uint32_t * const &, VULKAN_HPP_NAMESPACE::ImageLayout const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, imageType, format, extent, mipLevels, arrayLayers, samples, tiling, usage, sharingMode, queueFamilyIndexCount, pQueueFamilyIndices, initialLayout );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImageCreateInfo const & ) const = default;
#else
bool operator==( ImageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( imageType == rhs.imageType )
&& ( format == rhs.format )
&& ( extent == rhs.extent )
&& ( mipLevels == rhs.mipLevels )
&& ( arrayLayers == rhs.arrayLayers )
&& ( samples == rhs.samples )
&& ( tiling == rhs.tiling )
&& ( usage == rhs.usage )
&& ( sharingMode == rhs.sharingMode )
&& ( queueFamilyIndexCount == rhs.queueFamilyIndexCount )
&& ( pQueueFamilyIndices == rhs.pQueueFamilyIndices )
&& ( initialLayout == rhs.initialLayout );
#endif
}
bool operator!=( ImageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageCreateInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {};
VULKAN_HPP_NAMESPACE::ImageType imageType = VULKAN_HPP_NAMESPACE::ImageType::e1D;
VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
VULKAN_HPP_NAMESPACE::Extent3D extent = {};
uint32_t mipLevels = {};
uint32_t arrayLayers = {};
VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal;
VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {};
VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive;
uint32_t queueFamilyIndexCount = {};
const uint32_t * pQueueFamilyIndices = {};
VULKAN_HPP_NAMESPACE::ImageLayout initialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
};
template <>
struct CppType<StructureType, StructureType::eImageCreateInfo>
{
using Type = ImageCreateInfo;
};
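// Usage sketch (illustrative; device is an assumed vk::Device, default vk namespace, exceptions
// enabled): a minimal 2D image built through the chained setters and created in one call:
//   vk::ImageCreateInfo imageCreateInfo = vk::ImageCreateInfo{}
//     .setImageType( vk::ImageType::e2D )
//     .setFormat( vk::Format::eR8G8B8A8Unorm )
//     .setExtent( { 640, 480, 1 } )
//     .setMipLevels( 1 )
//     .setArrayLayers( 1 )
//     .setSamples( vk::SampleCountFlagBits::e1 )
//     .setTiling( vk::ImageTiling::eOptimal )
//     .setUsage( vk::ImageUsageFlagBits::eSampled | vk::ImageUsageFlagBits::eTransferDst );
//   vk::Image image = device.createImage( imageCreateInfo );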
// wrapper struct for struct VkDeviceImageMemoryRequirements, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceImageMemoryRequirements.html
struct DeviceImageMemoryRequirements
{
using NativeType = VkDeviceImageMemoryRequirements;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceImageMemoryRequirements;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DeviceImageMemoryRequirements(const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo_ = {}, VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pCreateInfo{ pCreateInfo_ }, planeAspect{ planeAspect_ }
{}
VULKAN_HPP_CONSTEXPR DeviceImageMemoryRequirements( DeviceImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DeviceImageMemoryRequirements( VkDeviceImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
: DeviceImageMemoryRequirements( *reinterpret_cast<DeviceImageMemoryRequirements const *>( &rhs ) )
{}
DeviceImageMemoryRequirements & operator=( DeviceImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DeviceImageMemoryRequirements & operator=( VkDeviceImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DeviceImageMemoryRequirements & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceImageMemoryRequirements & setPCreateInfo( const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo_ ) VULKAN_HPP_NOEXCEPT
{
pCreateInfo = pCreateInfo_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceImageMemoryRequirements & setPlaneAspect( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ ) VULKAN_HPP_NOEXCEPT
{
planeAspect = planeAspect_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDeviceImageMemoryRequirements const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceImageMemoryRequirements*>( this );
}
operator VkDeviceImageMemoryRequirements &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceImageMemoryRequirements*>( this );
}
operator VkDeviceImageMemoryRequirements const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDeviceImageMemoryRequirements*>( this );
}
operator VkDeviceImageMemoryRequirements *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDeviceImageMemoryRequirements*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const VULKAN_HPP_NAMESPACE::ImageCreateInfo * const &, VULKAN_HPP_NAMESPACE::ImageAspectFlagBits const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pCreateInfo, planeAspect );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DeviceImageMemoryRequirements const & ) const = default;
#else
bool operator==( DeviceImageMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pCreateInfo == rhs.pCreateInfo )
&& ( planeAspect == rhs.planeAspect );
#endif
}
bool operator!=( DeviceImageMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceImageMemoryRequirements;
const void * pNext = {};
const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo = {};
VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor;
};
template <>
struct CppType<StructureType, StructureType::eDeviceImageMemoryRequirements>
{
using Type = DeviceImageMemoryRequirements;
};
using DeviceImageMemoryRequirementsKHR = DeviceImageMemoryRequirements;
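// Usage sketch (illustrative): query the memory requirements of an image that has not been
// created yet, reusing the imageCreateInfo from the sketch above (an assumption); the pointed-to
// create info must stay alive for the duration of the call:
//   vk::DeviceImageMemoryRequirements requirementsInfo = vk::DeviceImageMemoryRequirements{}
//     .setPCreateInfo( &imageCreateInfo );
//   vk::MemoryRequirements2 memoryRequirements = device.getImageMemoryRequirements( requirementsInfo );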
// wrapper struct for struct VkImageSubresource2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageSubresource2.html
struct ImageSubresource2
{
using NativeType = VkImageSubresource2;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageSubresource2;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImageSubresource2(VULKAN_HPP_NAMESPACE::ImageSubresource imageSubresource_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, imageSubresource{ imageSubresource_ }
{}
VULKAN_HPP_CONSTEXPR ImageSubresource2( ImageSubresource2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageSubresource2( VkImageSubresource2 const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageSubresource2( *reinterpret_cast<ImageSubresource2 const *>( &rhs ) )
{}
ImageSubresource2 & operator=( ImageSubresource2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ImageSubresource2 & operator=( VkImageSubresource2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSubresource2 const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ImageSubresource2 & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageSubresource2 & setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresource const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT
{
imageSubresource = imageSubresource_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkImageSubresource2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageSubresource2*>( this );
}
operator VkImageSubresource2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageSubresource2*>( this );
}
operator VkImageSubresource2 const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkImageSubresource2*>( this );
}
operator VkImageSubresource2 *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkImageSubresource2*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ImageSubresource const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, imageSubresource );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImageSubresource2 const & ) const = default;
#else
bool operator==( ImageSubresource2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( imageSubresource == rhs.imageSubresource );
#endif
}
bool operator!=( ImageSubresource2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageSubresource2;
void * pNext = {};
VULKAN_HPP_NAMESPACE::ImageSubresource imageSubresource = {};
};
template <>
struct CppType<StructureType, StructureType::eImageSubresource2>
{
using Type = ImageSubresource2;
};
using ImageSubresource2EXT = ImageSubresource2;
using ImageSubresource2KHR = ImageSubresource2;
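// Usage sketch (illustrative): select mip level 0 / array layer 0 of the color aspect, as used by
// the subresource-layout queries that take a vk::ImageSubresource2:
//   vk::ImageSubresource2 subresource = vk::ImageSubresource2{}
//     .setImageSubresource( vk::ImageSubresource{ vk::ImageAspectFlagBits::eColor, 0, 0 } );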
// wrapper struct for struct VkDeviceImageSubresourceInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceImageSubresourceInfo.html
struct DeviceImageSubresourceInfo
{
using NativeType = VkDeviceImageSubresourceInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceImageSubresourceInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DeviceImageSubresourceInfo(const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo_ = {}, const VULKAN_HPP_NAMESPACE::ImageSubresource2 * pSubresource_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pCreateInfo{ pCreateInfo_ }, pSubresource{ pSubresource_ }
{}
VULKAN_HPP_CONSTEXPR DeviceImageSubresourceInfo( DeviceImageSubresourceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DeviceImageSubresourceInfo( VkDeviceImageSubresourceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: DeviceImageSubresourceInfo( *reinterpret_cast<DeviceImageSubresourceInfo const *>( &rhs ) )
{}
DeviceImageSubresourceInfo & operator=( DeviceImageSubresourceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DeviceImageSubresourceInfo & operator=( VkDeviceImageSubresourceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DeviceImageSubresourceInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceImageSubresourceInfo & setPCreateInfo( const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo_ ) VULKAN_HPP_NOEXCEPT
{
pCreateInfo = pCreateInfo_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceImageSubresourceInfo & setPSubresource( const VULKAN_HPP_NAMESPACE::ImageSubresource2 * pSubresource_ ) VULKAN_HPP_NOEXCEPT
{
pSubresource = pSubresource_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDeviceImageSubresourceInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceImageSubresourceInfo*>( this );
}
operator VkDeviceImageSubresourceInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceImageSubresourceInfo*>( this );
}
operator VkDeviceImageSubresourceInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDeviceImageSubresourceInfo*>( this );
}
operator VkDeviceImageSubresourceInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDeviceImageSubresourceInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const VULKAN_HPP_NAMESPACE::ImageCreateInfo * const &, const VULKAN_HPP_NAMESPACE::ImageSubresource2 * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pCreateInfo, pSubresource );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DeviceImageSubresourceInfo const & ) const = default;
#else
bool operator==( DeviceImageSubresourceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pCreateInfo == rhs.pCreateInfo )
&& ( pSubresource == rhs.pSubresource );
#endif
}
bool operator!=( DeviceImageSubresourceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceImageSubresourceInfo;
const void * pNext = {};
const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo = {};
const VULKAN_HPP_NAMESPACE::ImageSubresource2 * pSubresource = {};
};
template <>
struct CppType<StructureType, StructureType::eDeviceImageSubresourceInfo>
{
using Type = DeviceImageSubresourceInfo;
};
using DeviceImageSubresourceInfoKHR = DeviceImageSubresourceInfo;
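// Usage sketch (illustrative; reuses imageCreateInfo and subresource from the sketches above, and
// assumes the Vulkan 1.4 / VK_KHR_maintenance5 entry point is available on the device):
//   vk::DeviceImageSubresourceInfo subresourceInfo = vk::DeviceImageSubresourceInfo{}
//     .setPCreateInfo( &imageCreateInfo )
//     .setPSubresource( &subresource );
//   vk::SubresourceLayout2 layout = device.getImageSubresourceLayout( subresourceInfo );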
// wrapper struct for struct VkDeviceMemoryOpaqueCaptureAddressInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceMemoryOpaqueCaptureAddressInfo.html
struct DeviceMemoryOpaqueCaptureAddressInfo
{
using NativeType = VkDeviceMemoryOpaqueCaptureAddressInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceMemoryOpaqueCaptureAddressInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DeviceMemoryOpaqueCaptureAddressInfo(VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, memory{ memory_ }
{}
VULKAN_HPP_CONSTEXPR DeviceMemoryOpaqueCaptureAddressInfo( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DeviceMemoryOpaqueCaptureAddressInfo( VkDeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: DeviceMemoryOpaqueCaptureAddressInfo( *reinterpret_cast<DeviceMemoryOpaqueCaptureAddressInfo const *>( &rhs ) )
{}
DeviceMemoryOpaqueCaptureAddressInfo & operator=( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DeviceMemoryOpaqueCaptureAddressInfo & operator=( VkDeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOpaqueCaptureAddressInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOpaqueCaptureAddressInfo & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
{
memory = memory_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDeviceMemoryOpaqueCaptureAddressInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo*>( this );
}
operator VkDeviceMemoryOpaqueCaptureAddressInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceMemoryOpaqueCaptureAddressInfo*>( this );
}
operator VkDeviceMemoryOpaqueCaptureAddressInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo*>( this );
}
operator VkDeviceMemoryOpaqueCaptureAddressInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDeviceMemoryOpaqueCaptureAddressInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceMemory const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, memory );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DeviceMemoryOpaqueCaptureAddressInfo const & ) const = default;
#else
bool operator==( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( memory == rhs.memory );
#endif
}
bool operator!=( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryOpaqueCaptureAddressInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
};
template <>
struct CppType<StructureType, StructureType::eDeviceMemoryOpaqueCaptureAddressInfo>
{
using Type = DeviceMemoryOpaqueCaptureAddressInfo;
};
using DeviceMemoryOpaqueCaptureAddressInfoKHR = DeviceMemoryOpaqueCaptureAddressInfo;
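// Usage sketch (illustrative; memory is an assumed vk::DeviceMemory allocated with
// vk::MemoryAllocateFlagBits::eDeviceAddressCaptureReplay):
//   vk::DeviceMemoryOpaqueCaptureAddressInfo addressInfo = vk::DeviceMemoryOpaqueCaptureAddressInfo{}
//     .setMemory( memory );
//   uint64_t opaqueAddress = device.getMemoryOpaqueCaptureAddress( addressInfo );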
// wrapper struct for struct VkDeviceMemoryOverallocationCreateInfoAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceMemoryOverallocationCreateInfoAMD.html
struct DeviceMemoryOverallocationCreateInfoAMD
{
using NativeType = VkDeviceMemoryOverallocationCreateInfoAMD;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceMemoryOverallocationCreateInfoAMD;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DeviceMemoryOverallocationCreateInfoAMD(VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior_ = VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD::eDefault, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, overallocationBehavior{ overallocationBehavior_ }
{}
VULKAN_HPP_CONSTEXPR DeviceMemoryOverallocationCreateInfoAMD( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DeviceMemoryOverallocationCreateInfoAMD( VkDeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
: DeviceMemoryOverallocationCreateInfoAMD( *reinterpret_cast<DeviceMemoryOverallocationCreateInfoAMD const *>( &rhs ) )
{}
DeviceMemoryOverallocationCreateInfoAMD & operator=( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DeviceMemoryOverallocationCreateInfoAMD & operator=( VkDeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceMemoryOverallocationCreateInfoAMD const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOverallocationCreateInfoAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOverallocationCreateInfoAMD & setOverallocationBehavior( VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior_ ) VULKAN_HPP_NOEXCEPT
{
overallocationBehavior = overallocationBehavior_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDeviceMemoryOverallocationCreateInfoAMD const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceMemoryOverallocationCreateInfoAMD*>( this );
}
operator VkDeviceMemoryOverallocationCreateInfoAMD &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceMemoryOverallocationCreateInfoAMD*>( this );
}
operator VkDeviceMemoryOverallocationCreateInfoAMD const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDeviceMemoryOverallocationCreateInfoAMD*>( this );
}
operator VkDeviceMemoryOverallocationCreateInfoAMD *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDeviceMemoryOverallocationCreateInfoAMD*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, overallocationBehavior );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DeviceMemoryOverallocationCreateInfoAMD const & ) const = default;
#else
bool operator==( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( overallocationBehavior == rhs.overallocationBehavior );
#endif
}
bool operator!=( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryOverallocationCreateInfoAMD;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior = VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD::eDefault;
};
template <>
struct CppType<StructureType, StructureType::eDeviceMemoryOverallocationCreateInfoAMD>
{
using Type = DeviceMemoryOverallocationCreateInfoAMD;
};
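// Usage sketch (illustrative): chain into a vk::DeviceCreateInfo at device-creation time to opt
// out of AMD driver memory overallocation:
//   vk::DeviceMemoryOverallocationCreateInfoAMD overallocationInfo = vk::DeviceMemoryOverallocationCreateInfoAMD{}
//     .setOverallocationBehavior( vk::MemoryOverallocationBehaviorAMD::eDisallowed );
//   deviceCreateInfo.setPNext( &overallocationInfo );  // deviceCreateInfo: a vk::DeviceCreateInfo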
#if defined( VK_ENABLE_BETA_EXTENSIONS )
// wrapper union for union VkDeviceOrHostAddressConstAMDX, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceOrHostAddressConstAMDX.html
union DeviceOrHostAddressConstAMDX
{
using NativeType = VkDeviceOrHostAddressConstAMDX;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} )
: deviceAddress( deviceAddress_ )
{}
VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstAMDX( const void * hostAddress_ )
: hostAddress( hostAddress_ )
{}
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstAMDX & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
{
deviceAddress = deviceAddress_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstAMDX & setHostAddress( const void * hostAddress_ ) VULKAN_HPP_NOEXCEPT
{
hostAddress = hostAddress_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDeviceOrHostAddressConstAMDX const &() const
{
return *reinterpret_cast<const VkDeviceOrHostAddressConstAMDX*>( this );
}
operator VkDeviceOrHostAddressConstAMDX &()
{
return *reinterpret_cast<VkDeviceOrHostAddressConstAMDX*>( this );
}
#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress;
const void * hostAddress;
#else
VkDeviceAddress deviceAddress;
const void * hostAddress;
#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
};
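// Usage sketch (illustrative; bufferDeviceAddress and hostData are assumptions): exactly one
// member of the union is active at a time, selected by which constructor or setter was used last:
//   vk::DeviceOrHostAddressConstAMDX address( bufferDeviceAddress );  // device-address flavor
//   address.setHostAddress( hostData );                               // now the host-address flavor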
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
// wrapper struct for struct VkDevicePipelineBinaryInternalCacheControlKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDevicePipelineBinaryInternalCacheControlKHR.html
struct DevicePipelineBinaryInternalCacheControlKHR
{
using NativeType = VkDevicePipelineBinaryInternalCacheControlKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDevicePipelineBinaryInternalCacheControlKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DevicePipelineBinaryInternalCacheControlKHR(VULKAN_HPP_NAMESPACE::Bool32 disableInternalCache_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, disableInternalCache{ disableInternalCache_ }
{}
VULKAN_HPP_CONSTEXPR DevicePipelineBinaryInternalCacheControlKHR( DevicePipelineBinaryInternalCacheControlKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DevicePipelineBinaryInternalCacheControlKHR( VkDevicePipelineBinaryInternalCacheControlKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: DevicePipelineBinaryInternalCacheControlKHR( *reinterpret_cast<DevicePipelineBinaryInternalCacheControlKHR const *>( &rhs ) )
{}
DevicePipelineBinaryInternalCacheControlKHR & operator=( DevicePipelineBinaryInternalCacheControlKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DevicePipelineBinaryInternalCacheControlKHR & operator=( VkDevicePipelineBinaryInternalCacheControlKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DevicePipelineBinaryInternalCacheControlKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DevicePipelineBinaryInternalCacheControlKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DevicePipelineBinaryInternalCacheControlKHR & setDisableInternalCache( VULKAN_HPP_NAMESPACE::Bool32 disableInternalCache_ ) VULKAN_HPP_NOEXCEPT
{
disableInternalCache = disableInternalCache_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDevicePipelineBinaryInternalCacheControlKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDevicePipelineBinaryInternalCacheControlKHR*>( this );
}
operator VkDevicePipelineBinaryInternalCacheControlKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDevicePipelineBinaryInternalCacheControlKHR*>( this );
}
operator VkDevicePipelineBinaryInternalCacheControlKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDevicePipelineBinaryInternalCacheControlKHR*>( this );
}
operator VkDevicePipelineBinaryInternalCacheControlKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDevicePipelineBinaryInternalCacheControlKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, disableInternalCache );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DevicePipelineBinaryInternalCacheControlKHR const & ) const = default;
#else
bool operator==( DevicePipelineBinaryInternalCacheControlKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( disableInternalCache == rhs.disableInternalCache );
#endif
}
bool operator!=( DevicePipelineBinaryInternalCacheControlKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDevicePipelineBinaryInternalCacheControlKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 disableInternalCache = {};
};
template <>
struct CppType<StructureType, StructureType::eDevicePipelineBinaryInternalCacheControlKHR>
{
using Type = DevicePipelineBinaryInternalCacheControlKHR;
};
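// Usage sketch (illustrative): chain into a vk::DeviceCreateInfo to disable the driver's internal
// cache when using VK_KHR_pipeline_binary:
//   vk::DevicePipelineBinaryInternalCacheControlKHR cacheControl = vk::DevicePipelineBinaryInternalCacheControlKHR{}
//     .setDisableInternalCache( VK_TRUE );
//   deviceCreateInfo.setPNext( &cacheControl );  // deviceCreateInfo: a vk::DeviceCreateInfo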
// wrapper struct for struct VkDevicePrivateDataCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDevicePrivateDataCreateInfo.html
struct DevicePrivateDataCreateInfo
{
using NativeType = VkDevicePrivateDataCreateInfo;
static const bool allowDuplicate = true;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDevicePrivateDataCreateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DevicePrivateDataCreateInfo(uint32_t privateDataSlotRequestCount_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, privateDataSlotRequestCount{ privateDataSlotRequestCount_ }
{}
VULKAN_HPP_CONSTEXPR DevicePrivateDataCreateInfo( DevicePrivateDataCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DevicePrivateDataCreateInfo( VkDevicePrivateDataCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: DevicePrivateDataCreateInfo( *reinterpret_cast<DevicePrivateDataCreateInfo const *>( &rhs ) )
{}
DevicePrivateDataCreateInfo & operator=( DevicePrivateDataCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DevicePrivateDataCreateInfo & operator=( VkDevicePrivateDataCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DevicePrivateDataCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DevicePrivateDataCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DevicePrivateDataCreateInfo & setPrivateDataSlotRequestCount( uint32_t privateDataSlotRequestCount_ ) VULKAN_HPP_NOEXCEPT
{
privateDataSlotRequestCount = privateDataSlotRequestCount_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDevicePrivateDataCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDevicePrivateDataCreateInfo*>( this );
}
operator VkDevicePrivateDataCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDevicePrivateDataCreateInfo*>( this );
}
operator VkDevicePrivateDataCreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDevicePrivateDataCreateInfo*>( this );
}
operator VkDevicePrivateDataCreateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDevicePrivateDataCreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, privateDataSlotRequestCount );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DevicePrivateDataCreateInfo const & ) const = default;
#else
bool operator==( DevicePrivateDataCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( privateDataSlotRequestCount == rhs.privateDataSlotRequestCount );
#endif
}
bool operator!=( DevicePrivateDataCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDevicePrivateDataCreateInfo;
const void * pNext = {};
uint32_t privateDataSlotRequestCount = {};
};
template <>
struct CppType<StructureType, StructureType::eDevicePrivateDataCreateInfo>
{
using Type = DevicePrivateDataCreateInfo;
};
using DevicePrivateDataCreateInfoEXT = DevicePrivateDataCreateInfo;
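  // Illustrative usage sketch (not part of the generated header): reserving one
  // private data slot at device creation; deviceCreateInfo is assumed to exist in
  // the surrounding code. Since allowDuplicate is true, several of these structs
  // may appear in the same pNext chain.
  //
  //   VULKAN_HPP_NAMESPACE::DevicePrivateDataCreateInfo privateDataInfo{};
  //   privateDataInfo.setPrivateDataSlotRequestCount( 1 );
  //   deviceCreateInfo.setPNext( &privateDataInfo );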
// wrapper struct for struct VkDeviceQueueGlobalPriorityCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceQueueGlobalPriorityCreateInfo.html
struct DeviceQueueGlobalPriorityCreateInfo
{
using NativeType = VkDeviceQueueGlobalPriorityCreateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceQueueGlobalPriorityCreateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DeviceQueueGlobalPriorityCreateInfo(VULKAN_HPP_NAMESPACE::QueueGlobalPriority globalPriority_ = VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, globalPriority{ globalPriority_ }
{}
VULKAN_HPP_CONSTEXPR DeviceQueueGlobalPriorityCreateInfo( DeviceQueueGlobalPriorityCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DeviceQueueGlobalPriorityCreateInfo( VkDeviceQueueGlobalPriorityCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: DeviceQueueGlobalPriorityCreateInfo( *reinterpret_cast<DeviceQueueGlobalPriorityCreateInfo const *>( &rhs ) )
{}
DeviceQueueGlobalPriorityCreateInfo & operator=( DeviceQueueGlobalPriorityCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DeviceQueueGlobalPriorityCreateInfo & operator=( VkDeviceQueueGlobalPriorityCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DeviceQueueGlobalPriorityCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceQueueGlobalPriorityCreateInfo & setGlobalPriority( VULKAN_HPP_NAMESPACE::QueueGlobalPriority globalPriority_ ) VULKAN_HPP_NOEXCEPT
{
globalPriority = globalPriority_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDeviceQueueGlobalPriorityCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceQueueGlobalPriorityCreateInfo*>( this );
}
operator VkDeviceQueueGlobalPriorityCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceQueueGlobalPriorityCreateInfo*>( this );
}
operator VkDeviceQueueGlobalPriorityCreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDeviceQueueGlobalPriorityCreateInfo*>( this );
}
operator VkDeviceQueueGlobalPriorityCreateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDeviceQueueGlobalPriorityCreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::QueueGlobalPriority const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, globalPriority );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DeviceQueueGlobalPriorityCreateInfo const & ) const = default;
#else
bool operator==( DeviceQueueGlobalPriorityCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( globalPriority == rhs.globalPriority );
#endif
}
bool operator!=( DeviceQueueGlobalPriorityCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueGlobalPriorityCreateInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::QueueGlobalPriority globalPriority = VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow;
};
template <>
struct CppType<StructureType, StructureType::eDeviceQueueGlobalPriorityCreateInfo>
{
using Type = DeviceQueueGlobalPriorityCreateInfo;
};
using DeviceQueueGlobalPriorityCreateInfoEXT = DeviceQueueGlobalPriorityCreateInfo;
using DeviceQueueGlobalPriorityCreateInfoKHR = DeviceQueueGlobalPriorityCreateInfo;
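  // Illustrative usage sketch (not part of the generated header): requesting a
  // high-priority queue by chaining this struct into a DeviceQueueCreateInfo;
  // queueCreateInfo is assumed to be set up in the surrounding code.
  //
  //   VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfo priorityInfo{};
  //   priorityInfo.setGlobalPriority( VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eHigh );
  //   queueCreateInfo.setPNext( &priorityInfo );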
// wrapper struct for struct VkDeviceQueueInfo2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceQueueInfo2.html
struct DeviceQueueInfo2
{
using NativeType = VkDeviceQueueInfo2;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceQueueInfo2;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DeviceQueueInfo2(VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ = {}, uint32_t queueFamilyIndex_ = {}, uint32_t queueIndex_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, queueFamilyIndex{ queueFamilyIndex_ }, queueIndex{ queueIndex_ }
{}
VULKAN_HPP_CONSTEXPR DeviceQueueInfo2( DeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DeviceQueueInfo2( VkDeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
: DeviceQueueInfo2( *reinterpret_cast<DeviceQueueInfo2 const *>( &rhs ) )
{}
DeviceQueueInfo2 & operator=( DeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DeviceQueueInfo2 & operator=( VkDeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DeviceQueueInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceQueueInfo2 & setFlags( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceQueueInfo2 & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
{
queueFamilyIndex = queueFamilyIndex_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceQueueInfo2 & setQueueIndex( uint32_t queueIndex_ ) VULKAN_HPP_NOEXCEPT
{
queueIndex = queueIndex_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDeviceQueueInfo2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceQueueInfo2*>( this );
}
operator VkDeviceQueueInfo2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceQueueInfo2*>( this );
}
operator VkDeviceQueueInfo2 const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDeviceQueueInfo2*>( this );
}
operator VkDeviceQueueInfo2 *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDeviceQueueInfo2*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, queueFamilyIndex, queueIndex );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DeviceQueueInfo2 const & ) const = default;
#else
bool operator==( DeviceQueueInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( queueFamilyIndex == rhs.queueFamilyIndex )
&& ( queueIndex == rhs.queueIndex );
#endif
}
bool operator!=( DeviceQueueInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueInfo2;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags = {};
uint32_t queueFamilyIndex = {};
uint32_t queueIndex = {};
};
template <>
struct CppType<StructureType, StructureType::eDeviceQueueInfo2>
{
using Type = DeviceQueueInfo2;
};
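  // Illustrative usage sketch (not part of the generated header): retrieving a
  // protected-capable queue with Device::getQueue2; device and queueFamilyIndex
  // are assumed to come from the surrounding code.
  //
  //   VULKAN_HPP_NAMESPACE::Queue queue = device.getQueue2(
  //     VULKAN_HPP_NAMESPACE::DeviceQueueInfo2{}
  //       .setFlags( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlagBits::eProtected )
  //       .setQueueFamilyIndex( queueFamilyIndex )
  //       .setQueueIndex( 0 ) );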
// wrapper struct for struct VkDeviceQueueShaderCoreControlCreateInfoARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceQueueShaderCoreControlCreateInfoARM.html
struct DeviceQueueShaderCoreControlCreateInfoARM
{
using NativeType = VkDeviceQueueShaderCoreControlCreateInfoARM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceQueueShaderCoreControlCreateInfoARM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DeviceQueueShaderCoreControlCreateInfoARM(uint32_t shaderCoreCount_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, shaderCoreCount{ shaderCoreCount_ }
{}
VULKAN_HPP_CONSTEXPR DeviceQueueShaderCoreControlCreateInfoARM( DeviceQueueShaderCoreControlCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DeviceQueueShaderCoreControlCreateInfoARM( VkDeviceQueueShaderCoreControlCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
: DeviceQueueShaderCoreControlCreateInfoARM( *reinterpret_cast<DeviceQueueShaderCoreControlCreateInfoARM const *>( &rhs ) )
{}
DeviceQueueShaderCoreControlCreateInfoARM & operator=( DeviceQueueShaderCoreControlCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DeviceQueueShaderCoreControlCreateInfoARM & operator=( VkDeviceQueueShaderCoreControlCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceQueueShaderCoreControlCreateInfoARM const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DeviceQueueShaderCoreControlCreateInfoARM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceQueueShaderCoreControlCreateInfoARM & setShaderCoreCount( uint32_t shaderCoreCount_ ) VULKAN_HPP_NOEXCEPT
{
shaderCoreCount = shaderCoreCount_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDeviceQueueShaderCoreControlCreateInfoARM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceQueueShaderCoreControlCreateInfoARM*>( this );
}
operator VkDeviceQueueShaderCoreControlCreateInfoARM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceQueueShaderCoreControlCreateInfoARM*>( this );
}
operator VkDeviceQueueShaderCoreControlCreateInfoARM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDeviceQueueShaderCoreControlCreateInfoARM*>( this );
}
operator VkDeviceQueueShaderCoreControlCreateInfoARM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDeviceQueueShaderCoreControlCreateInfoARM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, shaderCoreCount );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DeviceQueueShaderCoreControlCreateInfoARM const & ) const = default;
#else
bool operator==( DeviceQueueShaderCoreControlCreateInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( shaderCoreCount == rhs.shaderCoreCount );
#endif
}
bool operator!=( DeviceQueueShaderCoreControlCreateInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueShaderCoreControlCreateInfoARM;
void * pNext = {};
uint32_t shaderCoreCount = {};
};
template <>
struct CppType<StructureType, StructureType::eDeviceQueueShaderCoreControlCreateInfoARM>
{
using Type = DeviceQueueShaderCoreControlCreateInfoARM;
};
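  // Illustrative usage sketch (not part of the generated header): limiting a queue
  // to a subset of shader cores by chaining this struct into a DeviceQueueCreateInfo;
  // queueCreateInfo is assumed to be set up in the surrounding code.
  //
  //   VULKAN_HPP_NAMESPACE::DeviceQueueShaderCoreControlCreateInfoARM coreControl{};
  //   coreControl.setShaderCoreCount( 4 );
  //   queueCreateInfo.setPNext( &coreControl );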
// wrapper struct for struct VkTensorDescriptionARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTensorDescriptionARM.html
struct TensorDescriptionARM
{
using NativeType = VkTensorDescriptionARM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eTensorDescriptionARM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR TensorDescriptionARM(VULKAN_HPP_NAMESPACE::TensorTilingARM tiling_ = VULKAN_HPP_NAMESPACE::TensorTilingARM::eOptimal, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, uint32_t dimensionCount_ = {}, const int64_t * pDimensions_ = {}, const int64_t * pStrides_ = {}, VULKAN_HPP_NAMESPACE::TensorUsageFlagsARM usage_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, tiling{ tiling_ }, format{ format_ }, dimensionCount{ dimensionCount_ }, pDimensions{ pDimensions_ }, pStrides{ pStrides_ }, usage{ usage_ }
{}
VULKAN_HPP_CONSTEXPR TensorDescriptionARM( TensorDescriptionARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
TensorDescriptionARM( VkTensorDescriptionARM const & rhs ) VULKAN_HPP_NOEXCEPT
: TensorDescriptionARM( *reinterpret_cast<TensorDescriptionARM const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
TensorDescriptionARM( VULKAN_HPP_NAMESPACE::TensorTilingARM tiling_, VULKAN_HPP_NAMESPACE::Format format_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const int64_t> const & dimensions_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const int64_t> const & strides_ = {}, VULKAN_HPP_NAMESPACE::TensorUsageFlagsARM usage_ = {}, const void * pNext_ = nullptr )
: pNext( pNext_ ), tiling( tiling_ ), format( format_ ), dimensionCount( static_cast<uint32_t>( dimensions_.size() ) ), pDimensions( dimensions_.data() ), pStrides( strides_.data() ), usage( usage_ )
{
#ifdef VULKAN_HPP_NO_EXCEPTIONS
VULKAN_HPP_ASSERT( strides_.empty() || ( dimensions_.size() == strides_.size() ) );
#else
if ( !strides_.empty() && ( dimensions_.size() != strides_.size() ) )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::TensorDescriptionARM::TensorDescriptionARM: !strides_.empty() && ( dimensions_.size() != strides_.size() )" );
}
#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
TensorDescriptionARM & operator=( TensorDescriptionARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
TensorDescriptionARM & operator=( VkTensorDescriptionARM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TensorDescriptionARM const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 TensorDescriptionARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 TensorDescriptionARM & setTiling( VULKAN_HPP_NAMESPACE::TensorTilingARM tiling_ ) VULKAN_HPP_NOEXCEPT
{
tiling = tiling_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 TensorDescriptionARM & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
{
format = format_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 TensorDescriptionARM & setDimensionCount( uint32_t dimensionCount_ ) VULKAN_HPP_NOEXCEPT
{
dimensionCount = dimensionCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 TensorDescriptionARM & setPDimensions( const int64_t * pDimensions_ ) VULKAN_HPP_NOEXCEPT
{
pDimensions = pDimensions_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
TensorDescriptionARM & setDimensions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const int64_t> const & dimensions_ ) VULKAN_HPP_NOEXCEPT
{
dimensionCount = static_cast<uint32_t>( dimensions_.size() );
pDimensions = dimensions_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 TensorDescriptionARM & setPStrides( const int64_t * pStrides_ ) VULKAN_HPP_NOEXCEPT
{
pStrides = pStrides_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
TensorDescriptionARM & setStrides( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const int64_t> const & strides_ ) VULKAN_HPP_NOEXCEPT
{
dimensionCount = static_cast<uint32_t>( strides_.size() );
pStrides = strides_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 TensorDescriptionARM & setUsage( VULKAN_HPP_NAMESPACE::TensorUsageFlagsARM usage_ ) VULKAN_HPP_NOEXCEPT
{
usage = usage_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkTensorDescriptionARM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkTensorDescriptionARM*>( this );
}
operator VkTensorDescriptionARM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkTensorDescriptionARM*>( this );
}
operator VkTensorDescriptionARM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkTensorDescriptionARM*>( this );
}
operator VkTensorDescriptionARM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkTensorDescriptionARM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::TensorTilingARM const &, VULKAN_HPP_NAMESPACE::Format const &, uint32_t const &, const int64_t * const &, const int64_t * const &, VULKAN_HPP_NAMESPACE::TensorUsageFlagsARM const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, tiling, format, dimensionCount, pDimensions, pStrides, usage );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( TensorDescriptionARM const & ) const = default;
#else
bool operator==( TensorDescriptionARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( tiling == rhs.tiling )
&& ( format == rhs.format )
&& ( dimensionCount == rhs.dimensionCount )
&& ( pDimensions == rhs.pDimensions )
&& ( pStrides == rhs.pStrides )
&& ( usage == rhs.usage );
#endif
}
bool operator!=( TensorDescriptionARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTensorDescriptionARM;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::TensorTilingARM tiling = VULKAN_HPP_NAMESPACE::TensorTilingARM::eOptimal;
VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
uint32_t dimensionCount = {};
const int64_t * pDimensions = {};
const int64_t * pStrides = {};
VULKAN_HPP_NAMESPACE::TensorUsageFlagsARM usage = {};
};
template <>
struct CppType<StructureType, StructureType::eTensorDescriptionARM>
{
using Type = TensorDescriptionARM;
};
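  // Illustrative usage sketch (not part of the generated header): the enhanced-mode
  // constructor fills dimensionCount / pDimensions from an ArrayProxy, and asserts or
  // throws if a non-empty strides span disagrees with the dimension count.
  //
  //   std::array<int64_t, 4> dims = { 1, 16, 16, 8 };
  //   VULKAN_HPP_NAMESPACE::TensorDescriptionARM description(
  //     VULKAN_HPP_NAMESPACE::TensorTilingARM::eOptimal,
  //     VULKAN_HPP_NAMESPACE::Format::eR8Sint,
  //     dims );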
// wrapper struct for struct VkTensorCreateInfoARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTensorCreateInfoARM.html
struct TensorCreateInfoARM
{
using NativeType = VkTensorCreateInfoARM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eTensorCreateInfoARM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR TensorCreateInfoARM(VULKAN_HPP_NAMESPACE::TensorCreateFlagsARM flags_ = {}, const VULKAN_HPP_NAMESPACE::TensorDescriptionARM * pDescription_ = {}, VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, uint32_t queueFamilyIndexCount_ = {}, const uint32_t * pQueueFamilyIndices_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, pDescription{ pDescription_ }, sharingMode{ sharingMode_ }, queueFamilyIndexCount{ queueFamilyIndexCount_ }, pQueueFamilyIndices{ pQueueFamilyIndices_ }
{}
VULKAN_HPP_CONSTEXPR TensorCreateInfoARM( TensorCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
TensorCreateInfoARM( VkTensorCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
: TensorCreateInfoARM( *reinterpret_cast<TensorCreateInfoARM const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
TensorCreateInfoARM( VULKAN_HPP_NAMESPACE::TensorCreateFlagsARM flags_, const VULKAN_HPP_NAMESPACE::TensorDescriptionARM * pDescription_, VULKAN_HPP_NAMESPACE::SharingMode sharingMode_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_, const void * pNext_ = nullptr )
: pNext( pNext_ ), flags( flags_ ), pDescription( pDescription_ ), sharingMode( sharingMode_ ), queueFamilyIndexCount( static_cast<uint32_t>( queueFamilyIndices_.size() ) ), pQueueFamilyIndices( queueFamilyIndices_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
TensorCreateInfoARM & operator=( TensorCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
TensorCreateInfoARM & operator=( VkTensorCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TensorCreateInfoARM const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 TensorCreateInfoARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 TensorCreateInfoARM & setFlags( VULKAN_HPP_NAMESPACE::TensorCreateFlagsARM flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 TensorCreateInfoARM & setPDescription( const VULKAN_HPP_NAMESPACE::TensorDescriptionARM * pDescription_ ) VULKAN_HPP_NOEXCEPT
{
pDescription = pDescription_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 TensorCreateInfoARM & setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT
{
sharingMode = sharingMode_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 TensorCreateInfoARM & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT
{
queueFamilyIndexCount = queueFamilyIndexCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 TensorCreateInfoARM & setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
{
pQueueFamilyIndices = pQueueFamilyIndices_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
TensorCreateInfoARM & setQueueFamilyIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
{
queueFamilyIndexCount = static_cast<uint32_t>( queueFamilyIndices_.size() );
pQueueFamilyIndices = queueFamilyIndices_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkTensorCreateInfoARM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkTensorCreateInfoARM*>( this );
}
operator VkTensorCreateInfoARM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkTensorCreateInfoARM*>( this );
}
operator VkTensorCreateInfoARM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkTensorCreateInfoARM*>( this );
}
operator VkTensorCreateInfoARM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkTensorCreateInfoARM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::TensorCreateFlagsARM const &, const VULKAN_HPP_NAMESPACE::TensorDescriptionARM * const &, VULKAN_HPP_NAMESPACE::SharingMode const &, uint32_t const &, const uint32_t * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, pDescription, sharingMode, queueFamilyIndexCount, pQueueFamilyIndices );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( TensorCreateInfoARM const & ) const = default;
#else
bool operator==( TensorCreateInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( pDescription == rhs.pDescription )
&& ( sharingMode == rhs.sharingMode )
&& ( queueFamilyIndexCount == rhs.queueFamilyIndexCount )
&& ( pQueueFamilyIndices == rhs.pQueueFamilyIndices );
#endif
}
bool operator!=( TensorCreateInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTensorCreateInfoARM;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::TensorCreateFlagsARM flags = {};
const VULKAN_HPP_NAMESPACE::TensorDescriptionARM * pDescription = {};
VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive;
uint32_t queueFamilyIndexCount = {};
const uint32_t * pQueueFamilyIndices = {};
};
template <>
struct CppType<StructureType, StructureType::eTensorCreateInfoARM>
{
using Type = TensorCreateInfoARM;
};
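  // Illustrative usage sketch (not part of the generated header): pointing a tensor
  // create info at the description built in the sketch above.
  //
  //   VULKAN_HPP_NAMESPACE::TensorCreateInfoARM tensorCreateInfo{};
  //   tensorCreateInfo.setPDescription( &description )
  //                   .setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode::eExclusive );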
// wrapper struct for struct VkDeviceTensorMemoryRequirementsARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceTensorMemoryRequirementsARM.html
struct DeviceTensorMemoryRequirementsARM
{
using NativeType = VkDeviceTensorMemoryRequirementsARM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceTensorMemoryRequirementsARM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DeviceTensorMemoryRequirementsARM(const VULKAN_HPP_NAMESPACE::TensorCreateInfoARM * pCreateInfo_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pCreateInfo{ pCreateInfo_ }
{}
VULKAN_HPP_CONSTEXPR DeviceTensorMemoryRequirementsARM( DeviceTensorMemoryRequirementsARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DeviceTensorMemoryRequirementsARM( VkDeviceTensorMemoryRequirementsARM const & rhs ) VULKAN_HPP_NOEXCEPT
: DeviceTensorMemoryRequirementsARM( *reinterpret_cast<DeviceTensorMemoryRequirementsARM const *>( &rhs ) )
{}
DeviceTensorMemoryRequirementsARM & operator=( DeviceTensorMemoryRequirementsARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DeviceTensorMemoryRequirementsARM & operator=( VkDeviceTensorMemoryRequirementsARM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceTensorMemoryRequirementsARM const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DeviceTensorMemoryRequirementsARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceTensorMemoryRequirementsARM & setPCreateInfo( const VULKAN_HPP_NAMESPACE::TensorCreateInfoARM * pCreateInfo_ ) VULKAN_HPP_NOEXCEPT
{
pCreateInfo = pCreateInfo_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDeviceTensorMemoryRequirementsARM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceTensorMemoryRequirementsARM*>( this );
}
operator VkDeviceTensorMemoryRequirementsARM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceTensorMemoryRequirementsARM*>( this );
}
operator VkDeviceTensorMemoryRequirementsARM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDeviceTensorMemoryRequirementsARM*>( this );
}
operator VkDeviceTensorMemoryRequirementsARM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDeviceTensorMemoryRequirementsARM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const VULKAN_HPP_NAMESPACE::TensorCreateInfoARM * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pCreateInfo );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DeviceTensorMemoryRequirementsARM const & ) const = default;
#else
bool operator==( DeviceTensorMemoryRequirementsARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pCreateInfo == rhs.pCreateInfo );
#endif
}
bool operator!=( DeviceTensorMemoryRequirementsARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceTensorMemoryRequirementsARM;
const void * pNext = {};
const VULKAN_HPP_NAMESPACE::TensorCreateInfoARM * pCreateInfo = {};
};
template <>
struct CppType<StructureType, StructureType::eDeviceTensorMemoryRequirementsARM>
{
using Type = DeviceTensorMemoryRequirementsARM;
};
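  // Illustrative usage sketch (not part of the generated header): querying memory
  // requirements for a tensor before creating it. The member-function name below is
  // an assumption based on vulkan.hpp naming conventions for
  // vkGetDeviceTensorMemoryRequirementsARM.
  //
  //   VULKAN_HPP_NAMESPACE::DeviceTensorMemoryRequirementsARM query{};
  //   query.setPCreateInfo( &tensorCreateInfo );
  //   auto requirements = device.getTensorMemoryRequirementsARM( query );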
typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_GetInstanceProcAddrLUNARG)( VULKAN_HPP_NAMESPACE::Instance instance, const char * pName );
// wrapper struct for struct VkDirectDriverLoadingInfoLUNARG, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDirectDriverLoadingInfoLUNARG.html
struct DirectDriverLoadingInfoLUNARG
{
using NativeType = VkDirectDriverLoadingInfoLUNARG;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDirectDriverLoadingInfoLUNARG;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DirectDriverLoadingInfoLUNARG(VULKAN_HPP_NAMESPACE::DirectDriverLoadingFlagsLUNARG flags_ = {}, VULKAN_HPP_NAMESPACE::PFN_GetInstanceProcAddrLUNARG pfnGetInstanceProcAddr_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, pfnGetInstanceProcAddr{ pfnGetInstanceProcAddr_ }
{}
VULKAN_HPP_CONSTEXPR DirectDriverLoadingInfoLUNARG( DirectDriverLoadingInfoLUNARG const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DirectDriverLoadingInfoLUNARG( VkDirectDriverLoadingInfoLUNARG const & rhs ) VULKAN_HPP_NOEXCEPT
: DirectDriverLoadingInfoLUNARG( *reinterpret_cast<DirectDriverLoadingInfoLUNARG const *>( &rhs ) )
{}
DirectDriverLoadingInfoLUNARG & operator=( DirectDriverLoadingInfoLUNARG const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DirectDriverLoadingInfoLUNARG & operator=( VkDirectDriverLoadingInfoLUNARG const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DirectDriverLoadingInfoLUNARG const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DirectDriverLoadingInfoLUNARG & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DirectDriverLoadingInfoLUNARG & setFlags( VULKAN_HPP_NAMESPACE::DirectDriverLoadingFlagsLUNARG flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DirectDriverLoadingInfoLUNARG & setPfnGetInstanceProcAddr( VULKAN_HPP_NAMESPACE::PFN_GetInstanceProcAddrLUNARG pfnGetInstanceProcAddr_ ) VULKAN_HPP_NOEXCEPT
{
pfnGetInstanceProcAddr = pfnGetInstanceProcAddr_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDirectDriverLoadingInfoLUNARG const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDirectDriverLoadingInfoLUNARG*>( this );
}
operator VkDirectDriverLoadingInfoLUNARG &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDirectDriverLoadingInfoLUNARG*>( this );
}
operator VkDirectDriverLoadingInfoLUNARG const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDirectDriverLoadingInfoLUNARG*>( this );
}
operator VkDirectDriverLoadingInfoLUNARG *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDirectDriverLoadingInfoLUNARG*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DirectDriverLoadingFlagsLUNARG const &, VULKAN_HPP_NAMESPACE::PFN_GetInstanceProcAddrLUNARG const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, pfnGetInstanceProcAddr );
}
#endif
bool operator==( DirectDriverLoadingInfoLUNARG const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( pfnGetInstanceProcAddr == rhs.pfnGetInstanceProcAddr );
#endif
}
bool operator!=( DirectDriverLoadingInfoLUNARG const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDirectDriverLoadingInfoLUNARG;
void * pNext = {};
VULKAN_HPP_NAMESPACE::DirectDriverLoadingFlagsLUNARG flags = {};
VULKAN_HPP_NAMESPACE::PFN_GetInstanceProcAddrLUNARG pfnGetInstanceProcAddr = {};
};
template <>
struct CppType<StructureType, StructureType::eDirectDriverLoadingInfoLUNARG>
{
using Type = DirectDriverLoadingInfoLUNARG;
};
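  // Illustrative usage sketch (not part of the generated header): describing one
  // directly loaded driver; myDriverGetInstanceProcAddr is a hypothetical entry
  // point exported by the driver being loaded.
  //
  //   VULKAN_HPP_NAMESPACE::DirectDriverLoadingInfoLUNARG driverInfo{};
  //   driverInfo.setPfnGetInstanceProcAddr( myDriverGetInstanceProcAddr );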
// wrapper struct for struct VkDirectDriverLoadingListLUNARG, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDirectDriverLoadingListLUNARG.html
struct DirectDriverLoadingListLUNARG
{
using NativeType = VkDirectDriverLoadingListLUNARG;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDirectDriverLoadingListLUNARG;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DirectDriverLoadingListLUNARG(VULKAN_HPP_NAMESPACE::DirectDriverLoadingModeLUNARG mode_ = VULKAN_HPP_NAMESPACE::DirectDriverLoadingModeLUNARG::eExclusive, uint32_t driverCount_ = {}, const VULKAN_HPP_NAMESPACE::DirectDriverLoadingInfoLUNARG * pDrivers_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, mode{ mode_ }, driverCount{ driverCount_ }, pDrivers{ pDrivers_ }
{}
VULKAN_HPP_CONSTEXPR DirectDriverLoadingListLUNARG( DirectDriverLoadingListLUNARG const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DirectDriverLoadingListLUNARG( VkDirectDriverLoadingListLUNARG const & rhs ) VULKAN_HPP_NOEXCEPT
: DirectDriverLoadingListLUNARG( *reinterpret_cast<DirectDriverLoadingListLUNARG const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
DirectDriverLoadingListLUNARG( VULKAN_HPP_NAMESPACE::DirectDriverLoadingModeLUNARG mode_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DirectDriverLoadingInfoLUNARG> const & drivers_, const void * pNext_ = nullptr )
: pNext( pNext_ ), mode( mode_ ), driverCount( static_cast<uint32_t>( drivers_.size() ) ), pDrivers( drivers_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
DirectDriverLoadingListLUNARG & operator=( DirectDriverLoadingListLUNARG const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DirectDriverLoadingListLUNARG & operator=( VkDirectDriverLoadingListLUNARG const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DirectDriverLoadingListLUNARG const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DirectDriverLoadingListLUNARG & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DirectDriverLoadingListLUNARG & setMode( VULKAN_HPP_NAMESPACE::DirectDriverLoadingModeLUNARG mode_ ) VULKAN_HPP_NOEXCEPT
{
mode = mode_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DirectDriverLoadingListLUNARG & setDriverCount( uint32_t driverCount_ ) VULKAN_HPP_NOEXCEPT
{
driverCount = driverCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DirectDriverLoadingListLUNARG & setPDrivers( const VULKAN_HPP_NAMESPACE::DirectDriverLoadingInfoLUNARG * pDrivers_ ) VULKAN_HPP_NOEXCEPT
{
pDrivers = pDrivers_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
DirectDriverLoadingListLUNARG & setDrivers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DirectDriverLoadingInfoLUNARG> const & drivers_ ) VULKAN_HPP_NOEXCEPT
{
driverCount = static_cast<uint32_t>( drivers_.size() );
pDrivers = drivers_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDirectDriverLoadingListLUNARG const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDirectDriverLoadingListLUNARG*>( this );
}
operator VkDirectDriverLoadingListLUNARG &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDirectDriverLoadingListLUNARG*>( this );
}
operator VkDirectDriverLoadingListLUNARG const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDirectDriverLoadingListLUNARG*>( this );
}
operator VkDirectDriverLoadingListLUNARG *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDirectDriverLoadingListLUNARG*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DirectDriverLoadingModeLUNARG const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::DirectDriverLoadingInfoLUNARG * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, mode, driverCount, pDrivers );
}
#endif
bool operator==( DirectDriverLoadingListLUNARG const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( mode == rhs.mode )
&& ( driverCount == rhs.driverCount )
&& ( pDrivers == rhs.pDrivers );
#endif
}
bool operator!=( DirectDriverLoadingListLUNARG const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDirectDriverLoadingListLUNARG;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DirectDriverLoadingModeLUNARG mode = VULKAN_HPP_NAMESPACE::DirectDriverLoadingModeLUNARG::eExclusive;
uint32_t driverCount = {};
const VULKAN_HPP_NAMESPACE::DirectDriverLoadingInfoLUNARG * pDrivers = {};
};
template <>
struct CppType<StructureType, StructureType::eDirectDriverLoadingListLUNARG>
{
using Type = DirectDriverLoadingListLUNARG;
};
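  // Illustrative usage sketch (not part of the generated header): chaining a driver
  // list into InstanceCreateInfo::pNext; driverInfo comes from the sketch above and
  // instanceCreateInfo is assumed to be populated elsewhere.
  //
  //   VULKAN_HPP_NAMESPACE::DirectDriverLoadingListLUNARG driverList(
  //     VULKAN_HPP_NAMESPACE::DirectDriverLoadingModeLUNARG::eExclusive, driverInfo );
  //   instanceCreateInfo.setPNext( &driverList );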
#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
// wrapper struct for struct VkDirectFBSurfaceCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDirectFBSurfaceCreateInfoEXT.html
struct DirectFBSurfaceCreateInfoEXT
{
using NativeType = VkDirectFBSurfaceCreateInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDirectfbSurfaceCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DirectFBSurfaceCreateInfoEXT(VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagsEXT flags_ = {}, IDirectFB * dfb_ = {}, IDirectFBSurface * surface_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, dfb{ dfb_ }, surface{ surface_ }
{}
VULKAN_HPP_CONSTEXPR DirectFBSurfaceCreateInfoEXT( DirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DirectFBSurfaceCreateInfoEXT( VkDirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: DirectFBSurfaceCreateInfoEXT( *reinterpret_cast<DirectFBSurfaceCreateInfoEXT const *>( &rhs ) )
{}
DirectFBSurfaceCreateInfoEXT & operator=( DirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DirectFBSurfaceCreateInfoEXT & operator=( VkDirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DirectFBSurfaceCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DirectFBSurfaceCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DirectFBSurfaceCreateInfoEXT & setDfb( IDirectFB * dfb_ ) VULKAN_HPP_NOEXCEPT
{
dfb = dfb_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DirectFBSurfaceCreateInfoEXT & setSurface( IDirectFBSurface * surface_ ) VULKAN_HPP_NOEXCEPT
{
surface = surface_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDirectFBSurfaceCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT*>( this );
}
operator VkDirectFBSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDirectFBSurfaceCreateInfoEXT*>( this );
}
operator VkDirectFBSurfaceCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT*>( this );
}
operator VkDirectFBSurfaceCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDirectFBSurfaceCreateInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagsEXT const &, IDirectFB * const &, IDirectFBSurface * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, dfb, surface );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DirectFBSurfaceCreateInfoEXT const & ) const = default;
#else
bool operator==( DirectFBSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( dfb == rhs.dfb )
&& ( surface == rhs.surface );
#endif
}
bool operator!=( DirectFBSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDirectfbSurfaceCreateInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagsEXT flags = {};
IDirectFB * dfb = {};
IDirectFBSurface * surface = {};
};
template <>
struct CppType<StructureType, StructureType::eDirectfbSurfaceCreateInfoEXT>
{
using Type = DirectFBSurfaceCreateInfoEXT;
};
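  // Illustrative usage sketch (not part of the generated header): creating a DirectFB
  // surface; dfb and dfbSurface are assumed to come from the DirectFB API in the
  // surrounding code.
  //
  //   VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT surfaceCreateInfo{};
  //   surfaceCreateInfo.setDfb( dfb ).setSurface( dfbSurface );
  //   VULKAN_HPP_NAMESPACE::SurfaceKHR surface = instance.createDirectFBSurfaceEXT( surfaceCreateInfo );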
#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
// wrapper struct for struct VkDispatchGraphCountInfoAMDX, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDispatchGraphCountInfoAMDX.html
struct DispatchGraphCountInfoAMDX
{
using NativeType = VkDispatchGraphCountInfoAMDX;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 DispatchGraphCountInfoAMDX(uint32_t count_ = {}, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX infos_ = {}, uint64_t stride_ = {}) VULKAN_HPP_NOEXCEPT
: count{ count_ }, infos{ infos_ }, stride{ stride_ }
{}
VULKAN_HPP_CONSTEXPR_14 DispatchGraphCountInfoAMDX( DispatchGraphCountInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DispatchGraphCountInfoAMDX( VkDispatchGraphCountInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT
: DispatchGraphCountInfoAMDX( *reinterpret_cast<DispatchGraphCountInfoAMDX const *>( &rhs ) )
{}
DispatchGraphCountInfoAMDX & operator=( DispatchGraphCountInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DispatchGraphCountInfoAMDX & operator=( VkDispatchGraphCountInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DispatchGraphCountInfoAMDX & setCount( uint32_t count_ ) VULKAN_HPP_NOEXCEPT
{
count = count_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DispatchGraphCountInfoAMDX & setInfos( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX const & infos_ ) VULKAN_HPP_NOEXCEPT
{
infos = infos_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DispatchGraphCountInfoAMDX & setStride( uint64_t stride_ ) VULKAN_HPP_NOEXCEPT
{
stride = stride_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDispatchGraphCountInfoAMDX const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDispatchGraphCountInfoAMDX*>( this );
}
operator VkDispatchGraphCountInfoAMDX &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDispatchGraphCountInfoAMDX*>( this );
}
operator VkDispatchGraphCountInfoAMDX const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDispatchGraphCountInfoAMDX*>( this );
}
operator VkDispatchGraphCountInfoAMDX *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDispatchGraphCountInfoAMDX*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<uint32_t const &, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX const &, uint64_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( count, infos, stride );
}
#endif
public:
uint32_t count = {};
VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX infos = {};
uint64_t stride = {};
};
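  // Illustrative usage sketch (not part of the generated header): describing a tightly
  // packed array of dispatch records; infosAddress is an assumed
  // VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX pointing at the records.
  //
  //   VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX countInfo(
  //     1, infosAddress, sizeof( VULKAN_HPP_NAMESPACE::DispatchGraphInfoAMDX ) );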
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
// wrapper struct for struct VkDispatchGraphInfoAMDX, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDispatchGraphInfoAMDX.html
struct DispatchGraphInfoAMDX
{
using NativeType = VkDispatchGraphInfoAMDX;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 DispatchGraphInfoAMDX(uint32_t nodeIndex_ = {}, uint32_t payloadCount_ = {}, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX payloads_ = {}, uint64_t payloadStride_ = {}) VULKAN_HPP_NOEXCEPT
: nodeIndex{ nodeIndex_ }, payloadCount{ payloadCount_ }, payloads{ payloads_ }, payloadStride{ payloadStride_ }
{}
VULKAN_HPP_CONSTEXPR_14 DispatchGraphInfoAMDX( DispatchGraphInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DispatchGraphInfoAMDX( VkDispatchGraphInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT
: DispatchGraphInfoAMDX( *reinterpret_cast<DispatchGraphInfoAMDX const *>( &rhs ) )
{}
DispatchGraphInfoAMDX & operator=( DispatchGraphInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DispatchGraphInfoAMDX & operator=( VkDispatchGraphInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DispatchGraphInfoAMDX const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DispatchGraphInfoAMDX & setNodeIndex( uint32_t nodeIndex_ ) VULKAN_HPP_NOEXCEPT
{
nodeIndex = nodeIndex_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DispatchGraphInfoAMDX & setPayloadCount( uint32_t payloadCount_ ) VULKAN_HPP_NOEXCEPT
{
payloadCount = payloadCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DispatchGraphInfoAMDX & setPayloads( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX const & payloads_ ) VULKAN_HPP_NOEXCEPT
{
payloads = payloads_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DispatchGraphInfoAMDX & setPayloadStride( uint64_t payloadStride_ ) VULKAN_HPP_NOEXCEPT
{
payloadStride = payloadStride_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDispatchGraphInfoAMDX const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDispatchGraphInfoAMDX*>( this );
}
operator VkDispatchGraphInfoAMDX &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDispatchGraphInfoAMDX*>( this );
}
operator VkDispatchGraphInfoAMDX const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDispatchGraphInfoAMDX*>( this );
}
operator VkDispatchGraphInfoAMDX *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDispatchGraphInfoAMDX*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX const &, uint64_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( nodeIndex, payloadCount, payloads, payloadStride );
}
#endif
public:
uint32_t nodeIndex = {};
uint32_t payloadCount = {};
VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX payloads = {};
uint64_t payloadStride = {};
};
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
// wrapper struct for struct VkDispatchIndirectCommand, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDispatchIndirectCommand.html
struct DispatchIndirectCommand
{
using NativeType = VkDispatchIndirectCommand;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DispatchIndirectCommand(uint32_t x_ = {}, uint32_t y_ = {}, uint32_t z_ = {}) VULKAN_HPP_NOEXCEPT
: x{ x_ }, y{ y_ }, z{ z_ }
{}
VULKAN_HPP_CONSTEXPR DispatchIndirectCommand( DispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DispatchIndirectCommand( VkDispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
: DispatchIndirectCommand( *reinterpret_cast<DispatchIndirectCommand const *>( &rhs ) )
{}
DispatchIndirectCommand & operator=( DispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DispatchIndirectCommand & operator=( VkDispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DispatchIndirectCommand const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DispatchIndirectCommand & setX( uint32_t x_ ) VULKAN_HPP_NOEXCEPT
{
x = x_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DispatchIndirectCommand & setY( uint32_t y_ ) VULKAN_HPP_NOEXCEPT
{
y = y_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DispatchIndirectCommand & setZ( uint32_t z_ ) VULKAN_HPP_NOEXCEPT
{
z = z_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDispatchIndirectCommand const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDispatchIndirectCommand*>( this );
}
operator VkDispatchIndirectCommand &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDispatchIndirectCommand*>( this );
}
operator VkDispatchIndirectCommand const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDispatchIndirectCommand*>( this );
}
operator VkDispatchIndirectCommand *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDispatchIndirectCommand*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<uint32_t const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( x, y, z );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DispatchIndirectCommand const & ) const = default;
#else
bool operator==( DispatchIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( x == rhs.x )
&& ( y == rhs.y )
&& ( z == rhs.z );
#endif
}
bool operator!=( DispatchIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
uint32_t x = {};
uint32_t y = {};
uint32_t z = {};
};
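  // Illustrative usage sketch (not part of the generated header): writing the three
  // workgroup counts into a mapped host-visible buffer, then consuming them with an
  // indirect dispatch; the buffer handles are assumed to exist elsewhere.
  //
  //   VULKAN_HPP_NAMESPACE::DispatchIndirectCommand cmd( groupCountX, groupCountY, 1 );
  //   std::memcpy( mappedIndirectBuffer, &cmd, sizeof( cmd ) );
  //   commandBuffer.dispatchIndirect( indirectBuffer, 0 );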
// wrapper struct for struct VkDispatchTileInfoQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDispatchTileInfoQCOM.html
struct DispatchTileInfoQCOM
{
using NativeType = VkDispatchTileInfoQCOM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDispatchTileInfoQCOM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DispatchTileInfoQCOM(const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }
{}
VULKAN_HPP_CONSTEXPR DispatchTileInfoQCOM( DispatchTileInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DispatchTileInfoQCOM( VkDispatchTileInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
: DispatchTileInfoQCOM( *reinterpret_cast<DispatchTileInfoQCOM const *>( &rhs ) )
{}
DispatchTileInfoQCOM & operator=( DispatchTileInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DispatchTileInfoQCOM & operator=( VkDispatchTileInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DispatchTileInfoQCOM const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DispatchTileInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDispatchTileInfoQCOM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDispatchTileInfoQCOM*>( this );
}
operator VkDispatchTileInfoQCOM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDispatchTileInfoQCOM*>( this );
}
operator VkDispatchTileInfoQCOM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDispatchTileInfoQCOM*>( this );
}
operator VkDispatchTileInfoQCOM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDispatchTileInfoQCOM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DispatchTileInfoQCOM const & ) const = default;
#else
bool operator==( DispatchTileInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext );
#endif
}
bool operator!=( DispatchTileInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDispatchTileInfoQCOM;
const void * pNext = {};
};
template <>
struct CppType<StructureType, StructureType::eDispatchTileInfoQCOM>
{
using Type = DispatchTileInfoQCOM;
};
// wrapper struct for struct VkDisplayEventInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayEventInfoEXT.html
struct DisplayEventInfoEXT
{
using NativeType = VkDisplayEventInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayEventInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DisplayEventInfoEXT(VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent_ = VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT::eFirstPixelOut, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, displayEvent{ displayEvent_ }
{}
VULKAN_HPP_CONSTEXPR DisplayEventInfoEXT( DisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DisplayEventInfoEXT( VkDisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: DisplayEventInfoEXT( *reinterpret_cast<DisplayEventInfoEXT const *>( &rhs ) )
{}
DisplayEventInfoEXT & operator=( DisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DisplayEventInfoEXT & operator=( VkDisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DisplayEventInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DisplayEventInfoEXT & setDisplayEvent( VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent_ ) VULKAN_HPP_NOEXCEPT
{
displayEvent = displayEvent_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDisplayEventInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDisplayEventInfoEXT*>( this );
}
operator VkDisplayEventInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDisplayEventInfoEXT*>( this );
}
operator VkDisplayEventInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDisplayEventInfoEXT*>( this );
}
operator VkDisplayEventInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDisplayEventInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, displayEvent );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DisplayEventInfoEXT const & ) const = default;
#else
bool operator==( DisplayEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( displayEvent == rhs.displayEvent );
#endif
}
bool operator!=( DisplayEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayEventInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent = VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT::eFirstPixelOut;
};
template <>
struct CppType<StructureType, StructureType::eDisplayEventInfoEXT>
{
using Type = DisplayEventInfoEXT;
};
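// A usage sketch (illustrative, not part of the generated code): with VK_EXT_display_control
// enabled, this struct is passed to Device::registerDisplayEventEXT to obtain a fence that is
// signaled when the chosen event occurs; 'device' and 'display' are assumed valid handles, and
// the exception-based (enhanced) vulkan.hpp mode is assumed for the return type.
//   vk::DisplayEventInfoEXT eventInfo( vk::DisplayEventTypeEXT::eFirstPixelOut );
//   vk::Fence fence = device.registerDisplayEventEXT( display, eventInfo );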
// wrapper struct for struct VkDisplayModeParametersKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayModeParametersKHR.html
struct DisplayModeParametersKHR
{
using NativeType = VkDisplayModeParametersKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DisplayModeParametersKHR(VULKAN_HPP_NAMESPACE::Extent2D visibleRegion_ = {}, uint32_t refreshRate_ = {}) VULKAN_HPP_NOEXCEPT
: visibleRegion{ visibleRegion_ }, refreshRate{ refreshRate_ }
{}
VULKAN_HPP_CONSTEXPR DisplayModeParametersKHR( DisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DisplayModeParametersKHR( VkDisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: DisplayModeParametersKHR( *reinterpret_cast<DisplayModeParametersKHR const *>( &rhs ) )
{}
DisplayModeParametersKHR & operator=( DisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DisplayModeParametersKHR & operator=( VkDisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DisplayModeParametersKHR & setVisibleRegion( VULKAN_HPP_NAMESPACE::Extent2D const & visibleRegion_ ) VULKAN_HPP_NOEXCEPT
{
visibleRegion = visibleRegion_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DisplayModeParametersKHR & setRefreshRate( uint32_t refreshRate_ ) VULKAN_HPP_NOEXCEPT
{
refreshRate = refreshRate_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDisplayModeParametersKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDisplayModeParametersKHR*>( this );
}
operator VkDisplayModeParametersKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDisplayModeParametersKHR*>( this );
}
operator VkDisplayModeParametersKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDisplayModeParametersKHR*>( this );
}
operator VkDisplayModeParametersKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDisplayModeParametersKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::Extent2D const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( visibleRegion, refreshRate );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DisplayModeParametersKHR const & ) const = default;
#else
bool operator==( DisplayModeParametersKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( visibleRegion == rhs.visibleRegion )
&& ( refreshRate == rhs.refreshRate );
#endif
}
bool operator!=( DisplayModeParametersKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::Extent2D visibleRegion = {};
uint32_t refreshRate = {};
};
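// A minimal construction sketch (illustrative): the chained setters above support fluent
// initialization; refreshRate is specified in millihertz (Hz * 1000), so 60000 means 60 Hz.
//   vk::DisplayModeParametersKHR params =
//     vk::DisplayModeParametersKHR{}.setVisibleRegion( { 1920, 1080 } ).setRefreshRate( 60000 );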
// wrapper struct for struct VkDisplayModeCreateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayModeCreateInfoKHR.html
struct DisplayModeCreateInfoKHR
{
using NativeType = VkDisplayModeCreateInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayModeCreateInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DisplayModeCreateInfoKHR(VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags_ = {}, VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, parameters{ parameters_ }
{}
VULKAN_HPP_CONSTEXPR DisplayModeCreateInfoKHR( DisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DisplayModeCreateInfoKHR( VkDisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: DisplayModeCreateInfoKHR( *reinterpret_cast<DisplayModeCreateInfoKHR const *>( &rhs ) )
{}
DisplayModeCreateInfoKHR & operator=( DisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DisplayModeCreateInfoKHR & operator=( VkDisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DisplayModeCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DisplayModeCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DisplayModeCreateInfoKHR & setParameters( VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR const & parameters_ ) VULKAN_HPP_NOEXCEPT
{
parameters = parameters_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDisplayModeCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDisplayModeCreateInfoKHR*>( this );
}
operator VkDisplayModeCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDisplayModeCreateInfoKHR*>( this );
}
operator VkDisplayModeCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDisplayModeCreateInfoKHR*>( this );
}
operator VkDisplayModeCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDisplayModeCreateInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR const &, VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, parameters );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DisplayModeCreateInfoKHR const & ) const = default;
#else
bool operator==( DisplayModeCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( parameters == rhs.parameters );
#endif
}
bool operator!=( DisplayModeCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayModeCreateInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags = {};
VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters = {};
};
template <>
struct CppType<StructureType, StructureType::eDisplayModeCreateInfoKHR>
{
using Type = DisplayModeCreateInfoKHR;
};
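// A usage sketch (illustrative, not part of the generated code): with VK_KHR_display enabled,
// this create-info is passed to PhysicalDevice::createDisplayModeKHR to create a display mode;
// 'physicalDevice' and 'display' are assumed valid, and enhanced mode is assumed for the return.
//   vk::DisplayModeCreateInfoKHR createInfo{};
//   createInfo.setParameters( vk::DisplayModeParametersKHR{ { 1920, 1080 }, 60000 } );
//   vk::DisplayModeKHR mode = physicalDevice.createDisplayModeKHR( display, createInfo );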
// wrapper struct for struct VkDisplayModePropertiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayModePropertiesKHR.html
struct DisplayModePropertiesKHR
{
using NativeType = VkDisplayModePropertiesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DisplayModePropertiesKHR(VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ = {}, VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters_ = {}) VULKAN_HPP_NOEXCEPT
: displayMode{ displayMode_ }, parameters{ parameters_ }
{}
VULKAN_HPP_CONSTEXPR DisplayModePropertiesKHR( DisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DisplayModePropertiesKHR( VkDisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: DisplayModePropertiesKHR( *reinterpret_cast<DisplayModePropertiesKHR const *>( &rhs ) )
{}
DisplayModePropertiesKHR & operator=( DisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DisplayModePropertiesKHR & operator=( VkDisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR const *>( &rhs );
return *this;
}
operator VkDisplayModePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDisplayModePropertiesKHR*>( this );
}
operator VkDisplayModePropertiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDisplayModePropertiesKHR*>( this );
}
operator VkDisplayModePropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDisplayModePropertiesKHR*>( this );
}
operator VkDisplayModePropertiesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDisplayModePropertiesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::DisplayModeKHR const &, VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( displayMode, parameters );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DisplayModePropertiesKHR const & ) const = default;
#else
bool operator==( DisplayModePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( displayMode == rhs.displayMode )
&& ( parameters == rhs.parameters );
#endif
}
bool operator!=( DisplayModePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode = {};
VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters = {};
};
// wrapper struct for struct VkDisplayModeProperties2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayModeProperties2KHR.html
struct DisplayModeProperties2KHR
{
using NativeType = VkDisplayModeProperties2KHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayModeProperties2KHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DisplayModeProperties2KHR(VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR displayModeProperties_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, displayModeProperties{ displayModeProperties_ }
{}
VULKAN_HPP_CONSTEXPR DisplayModeProperties2KHR( DisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DisplayModeProperties2KHR( VkDisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
: DisplayModeProperties2KHR( *reinterpret_cast<DisplayModeProperties2KHR const *>( &rhs ) )
{}
DisplayModeProperties2KHR & operator=( DisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DisplayModeProperties2KHR & operator=( VkDisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR const *>( &rhs );
return *this;
}
operator VkDisplayModeProperties2KHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDisplayModeProperties2KHR*>( this );
}
operator VkDisplayModeProperties2KHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDisplayModeProperties2KHR*>( this );
}
operator VkDisplayModeProperties2KHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDisplayModeProperties2KHR*>( this );
}
operator VkDisplayModeProperties2KHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDisplayModeProperties2KHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, displayModeProperties );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DisplayModeProperties2KHR const & ) const = default;
#else
bool operator==( DisplayModeProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( displayModeProperties == rhs.displayModeProperties );
#endif
}
bool operator!=( DisplayModeProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayModeProperties2KHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR displayModeProperties = {};
};
template <>
struct CppType<StructureType, StructureType::eDisplayModeProperties2KHR>
{
using Type = DisplayModeProperties2KHR;
};
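// A usage sketch (illustrative): with VK_KHR_get_display_properties2 enabled, the modes of a
// display are enumerated through PhysicalDevice::getDisplayModeProperties2KHR; the vector
// return shown assumes the enhanced vulkan.hpp mode.
//   std::vector<vk::DisplayModeProperties2KHR> modes = physicalDevice.getDisplayModeProperties2KHR( display );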
// wrapper struct for struct VkDisplayModeStereoPropertiesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayModeStereoPropertiesNV.html
struct DisplayModeStereoPropertiesNV
{
using NativeType = VkDisplayModeStereoPropertiesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayModeStereoPropertiesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DisplayModeStereoPropertiesNV(VULKAN_HPP_NAMESPACE::Bool32 hdmi3DSupported_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, hdmi3DSupported{ hdmi3DSupported_ }
{}
VULKAN_HPP_CONSTEXPR DisplayModeStereoPropertiesNV( DisplayModeStereoPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DisplayModeStereoPropertiesNV( VkDisplayModeStereoPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: DisplayModeStereoPropertiesNV( *reinterpret_cast<DisplayModeStereoPropertiesNV const *>( &rhs ) )
{}
DisplayModeStereoPropertiesNV & operator=( DisplayModeStereoPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DisplayModeStereoPropertiesNV & operator=( VkDisplayModeStereoPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayModeStereoPropertiesNV const *>( &rhs );
return *this;
}
operator VkDisplayModeStereoPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDisplayModeStereoPropertiesNV*>( this );
}
operator VkDisplayModeStereoPropertiesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDisplayModeStereoPropertiesNV*>( this );
}
operator VkDisplayModeStereoPropertiesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDisplayModeStereoPropertiesNV*>( this );
}
operator VkDisplayModeStereoPropertiesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDisplayModeStereoPropertiesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, hdmi3DSupported );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DisplayModeStereoPropertiesNV const & ) const = default;
#else
bool operator==( DisplayModeStereoPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( hdmi3DSupported == rhs.hdmi3DSupported );
#endif
}
bool operator!=( DisplayModeStereoPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayModeStereoPropertiesNV;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 hdmi3DSupported = {};
};
template <>
struct CppType<StructureType, StructureType::eDisplayModeStereoPropertiesNV>
{
using Type = DisplayModeStereoPropertiesNV;
};
// wrapper struct for struct VkDisplayNativeHdrSurfaceCapabilitiesAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayNativeHdrSurfaceCapabilitiesAMD.html
struct DisplayNativeHdrSurfaceCapabilitiesAMD
{
using NativeType = VkDisplayNativeHdrSurfaceCapabilitiesAMD;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DisplayNativeHdrSurfaceCapabilitiesAMD(VULKAN_HPP_NAMESPACE::Bool32 localDimmingSupport_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, localDimmingSupport{ localDimmingSupport_ }
{}
VULKAN_HPP_CONSTEXPR DisplayNativeHdrSurfaceCapabilitiesAMD( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DisplayNativeHdrSurfaceCapabilitiesAMD( VkDisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
: DisplayNativeHdrSurfaceCapabilitiesAMD( *reinterpret_cast<DisplayNativeHdrSurfaceCapabilitiesAMD const *>( &rhs ) )
{}
DisplayNativeHdrSurfaceCapabilitiesAMD & operator=( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DisplayNativeHdrSurfaceCapabilitiesAMD & operator=( VkDisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayNativeHdrSurfaceCapabilitiesAMD const *>( &rhs );
return *this;
}
operator VkDisplayNativeHdrSurfaceCapabilitiesAMD const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDisplayNativeHdrSurfaceCapabilitiesAMD*>( this );
}
operator VkDisplayNativeHdrSurfaceCapabilitiesAMD &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDisplayNativeHdrSurfaceCapabilitiesAMD*>( this );
}
operator VkDisplayNativeHdrSurfaceCapabilitiesAMD const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDisplayNativeHdrSurfaceCapabilitiesAMD*>( this );
}
operator VkDisplayNativeHdrSurfaceCapabilitiesAMD *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDisplayNativeHdrSurfaceCapabilitiesAMD*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, localDimmingSupport );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DisplayNativeHdrSurfaceCapabilitiesAMD const & ) const = default;
#else
bool operator==( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( localDimmingSupport == rhs.localDimmingSupport );
#endif
}
bool operator!=( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 localDimmingSupport = {};
};
template <>
struct CppType<StructureType, StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD>
{
using Type = DisplayNativeHdrSurfaceCapabilitiesAMD;
};
// wrapper struct for struct VkDisplayPlaneCapabilitiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayPlaneCapabilitiesKHR.html
struct DisplayPlaneCapabilitiesKHR
{
using NativeType = VkDisplayPlaneCapabilitiesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilitiesKHR(VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagsKHR supportedAlpha_ = {}, VULKAN_HPP_NAMESPACE::Offset2D minSrcPosition_ = {}, VULKAN_HPP_NAMESPACE::Offset2D maxSrcPosition_ = {}, VULKAN_HPP_NAMESPACE::Extent2D minSrcExtent_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxSrcExtent_ = {}, VULKAN_HPP_NAMESPACE::Offset2D minDstPosition_ = {}, VULKAN_HPP_NAMESPACE::Offset2D maxDstPosition_ = {}, VULKAN_HPP_NAMESPACE::Extent2D minDstExtent_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxDstExtent_ = {}) VULKAN_HPP_NOEXCEPT
: supportedAlpha{ supportedAlpha_ }, minSrcPosition{ minSrcPosition_ }, maxSrcPosition{ maxSrcPosition_ }, minSrcExtent{ minSrcExtent_ }, maxSrcExtent{ maxSrcExtent_ }, minDstPosition{ minDstPosition_ }, maxDstPosition{ maxDstPosition_ }, minDstExtent{ minDstExtent_ }, maxDstExtent{ maxDstExtent_ }
{}
VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilitiesKHR( DisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DisplayPlaneCapabilitiesKHR( VkDisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: DisplayPlaneCapabilitiesKHR( *reinterpret_cast<DisplayPlaneCapabilitiesKHR const *>( &rhs ) )
{}
DisplayPlaneCapabilitiesKHR & operator=( DisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DisplayPlaneCapabilitiesKHR & operator=( VkDisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR const *>( &rhs );
return *this;
}
operator VkDisplayPlaneCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDisplayPlaneCapabilitiesKHR*>( this );
}
operator VkDisplayPlaneCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDisplayPlaneCapabilitiesKHR*>( this );
}
operator VkDisplayPlaneCapabilitiesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDisplayPlaneCapabilitiesKHR*>( this );
}
operator VkDisplayPlaneCapabilitiesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDisplayPlaneCapabilitiesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagsKHR const &, VULKAN_HPP_NAMESPACE::Offset2D const &, VULKAN_HPP_NAMESPACE::Offset2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::Offset2D const &, VULKAN_HPP_NAMESPACE::Offset2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( supportedAlpha, minSrcPosition, maxSrcPosition, minSrcExtent, maxSrcExtent, minDstPosition, maxDstPosition, minDstExtent, maxDstExtent );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DisplayPlaneCapabilitiesKHR const & ) const = default;
#else
bool operator==( DisplayPlaneCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( supportedAlpha == rhs.supportedAlpha )
&& ( minSrcPosition == rhs.minSrcPosition )
&& ( maxSrcPosition == rhs.maxSrcPosition )
&& ( minSrcExtent == rhs.minSrcExtent )
&& ( maxSrcExtent == rhs.maxSrcExtent )
&& ( minDstPosition == rhs.minDstPosition )
&& ( maxDstPosition == rhs.maxDstPosition )
&& ( minDstExtent == rhs.minDstExtent )
&& ( maxDstExtent == rhs.maxDstExtent );
#endif
}
bool operator!=( DisplayPlaneCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagsKHR supportedAlpha = {};
VULKAN_HPP_NAMESPACE::Offset2D minSrcPosition = {};
VULKAN_HPP_NAMESPACE::Offset2D maxSrcPosition = {};
VULKAN_HPP_NAMESPACE::Extent2D minSrcExtent = {};
VULKAN_HPP_NAMESPACE::Extent2D maxSrcExtent = {};
VULKAN_HPP_NAMESPACE::Offset2D minDstPosition = {};
VULKAN_HPP_NAMESPACE::Offset2D maxDstPosition = {};
VULKAN_HPP_NAMESPACE::Extent2D minDstExtent = {};
VULKAN_HPP_NAMESPACE::Extent2D maxDstExtent = {};
};
// wrapper struct for struct VkDisplayPlaneCapabilities2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayPlaneCapabilities2KHR.html
struct DisplayPlaneCapabilities2KHR
{
using NativeType = VkDisplayPlaneCapabilities2KHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPlaneCapabilities2KHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilities2KHR(VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, capabilities{ capabilities_ }
{}
VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilities2KHR( DisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DisplayPlaneCapabilities2KHR( VkDisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
: DisplayPlaneCapabilities2KHR( *reinterpret_cast<DisplayPlaneCapabilities2KHR const *>( &rhs ) )
{}
DisplayPlaneCapabilities2KHR & operator=( DisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DisplayPlaneCapabilities2KHR & operator=( VkDisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR const *>( &rhs );
return *this;
}
operator VkDisplayPlaneCapabilities2KHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDisplayPlaneCapabilities2KHR*>( this );
}
operator VkDisplayPlaneCapabilities2KHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDisplayPlaneCapabilities2KHR*>( this );
}
operator VkDisplayPlaneCapabilities2KHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDisplayPlaneCapabilities2KHR*>( this );
}
operator VkDisplayPlaneCapabilities2KHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDisplayPlaneCapabilities2KHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, capabilities );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DisplayPlaneCapabilities2KHR const & ) const = default;
#else
bool operator==( DisplayPlaneCapabilities2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( capabilities == rhs.capabilities );
#endif
}
bool operator!=( DisplayPlaneCapabilities2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneCapabilities2KHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities = {};
};
template <>
struct CppType<StructureType, StructureType::eDisplayPlaneCapabilities2KHR>
{
using Type = DisplayPlaneCapabilities2KHR;
};
// wrapper struct for struct VkDisplayPlaneInfo2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayPlaneInfo2KHR.html
struct DisplayPlaneInfo2KHR
{
using NativeType = VkDisplayPlaneInfo2KHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPlaneInfo2KHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DisplayPlaneInfo2KHR(VULKAN_HPP_NAMESPACE::DisplayModeKHR mode_ = {}, uint32_t planeIndex_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, mode{ mode_ }, planeIndex{ planeIndex_ }
{}
VULKAN_HPP_CONSTEXPR DisplayPlaneInfo2KHR( DisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DisplayPlaneInfo2KHR( VkDisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
: DisplayPlaneInfo2KHR( *reinterpret_cast<DisplayPlaneInfo2KHR const *>( &rhs ) )
{}
DisplayPlaneInfo2KHR & operator=( DisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DisplayPlaneInfo2KHR & operator=( VkDisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DisplayPlaneInfo2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DisplayPlaneInfo2KHR & setMode( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode_ ) VULKAN_HPP_NOEXCEPT
{
mode = mode_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DisplayPlaneInfo2KHR & setPlaneIndex( uint32_t planeIndex_ ) VULKAN_HPP_NOEXCEPT
{
planeIndex = planeIndex_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDisplayPlaneInfo2KHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDisplayPlaneInfo2KHR*>( this );
}
operator VkDisplayPlaneInfo2KHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDisplayPlaneInfo2KHR*>( this );
}
operator VkDisplayPlaneInfo2KHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDisplayPlaneInfo2KHR*>( this );
}
operator VkDisplayPlaneInfo2KHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDisplayPlaneInfo2KHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DisplayModeKHR const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, mode, planeIndex );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DisplayPlaneInfo2KHR const & ) const = default;
#else
bool operator==( DisplayPlaneInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( mode == rhs.mode )
&& ( planeIndex == rhs.planeIndex );
#endif
}
bool operator!=( DisplayPlaneInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneInfo2KHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DisplayModeKHR mode = {};
uint32_t planeIndex = {};
};
template <>
struct CppType<StructureType, StructureType::eDisplayPlaneInfo2KHR>
{
using Type = DisplayPlaneInfo2KHR;
};
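// A usage sketch (illustrative): this struct selects the (mode, plane) pair to query through
// PhysicalDevice::getDisplayPlaneCapabilities2KHR; 'mode' is assumed to be a valid DisplayModeKHR.
//   vk::DisplayPlaneInfo2KHR planeInfo( mode, 0 /* planeIndex */ );
//   vk::DisplayPlaneCapabilities2KHR caps = physicalDevice.getDisplayPlaneCapabilities2KHR( planeInfo );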
// wrapper struct for struct VkDisplayPlanePropertiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayPlanePropertiesKHR.html
struct DisplayPlanePropertiesKHR
{
using NativeType = VkDisplayPlanePropertiesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DisplayPlanePropertiesKHR(VULKAN_HPP_NAMESPACE::DisplayKHR currentDisplay_ = {}, uint32_t currentStackIndex_ = {}) VULKAN_HPP_NOEXCEPT
: currentDisplay{ currentDisplay_ }, currentStackIndex{ currentStackIndex_ }
{}
VULKAN_HPP_CONSTEXPR DisplayPlanePropertiesKHR( DisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DisplayPlanePropertiesKHR( VkDisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: DisplayPlanePropertiesKHR( *reinterpret_cast<DisplayPlanePropertiesKHR const *>( &rhs ) )
{}
DisplayPlanePropertiesKHR & operator=( DisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DisplayPlanePropertiesKHR & operator=( VkDisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR const *>( &rhs );
return *this;
}
operator VkDisplayPlanePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDisplayPlanePropertiesKHR*>( this );
}
operator VkDisplayPlanePropertiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDisplayPlanePropertiesKHR*>( this );
}
operator VkDisplayPlanePropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDisplayPlanePropertiesKHR*>( this );
}
operator VkDisplayPlanePropertiesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDisplayPlanePropertiesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::DisplayKHR const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( currentDisplay, currentStackIndex );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DisplayPlanePropertiesKHR const & ) const = default;
#else
bool operator==( DisplayPlanePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( currentDisplay == rhs.currentDisplay )
&& ( currentStackIndex == rhs.currentStackIndex );
#endif
}
bool operator!=( DisplayPlanePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::DisplayKHR currentDisplay = {};
uint32_t currentStackIndex = {};
};
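// A usage sketch (illustrative): instances of this struct are returned when enumerating planes
// via PhysicalDevice::getDisplayPlanePropertiesKHR (enhanced vulkan.hpp mode shown).
//   std::vector<vk::DisplayPlanePropertiesKHR> planes = physicalDevice.getDisplayPlanePropertiesKHR();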
// wrapper struct for struct VkDisplayPlaneProperties2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayPlaneProperties2KHR.html
struct DisplayPlaneProperties2KHR
{
using NativeType = VkDisplayPlaneProperties2KHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPlaneProperties2KHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DisplayPlaneProperties2KHR(VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR displayPlaneProperties_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, displayPlaneProperties{ displayPlaneProperties_ }
{}
VULKAN_HPP_CONSTEXPR DisplayPlaneProperties2KHR( DisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DisplayPlaneProperties2KHR( VkDisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
: DisplayPlaneProperties2KHR( *reinterpret_cast<DisplayPlaneProperties2KHR const *>( &rhs ) )
{}
DisplayPlaneProperties2KHR & operator=( DisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DisplayPlaneProperties2KHR & operator=( VkDisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR const *>( &rhs );
return *this;
}
operator VkDisplayPlaneProperties2KHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDisplayPlaneProperties2KHR*>( this );
}
operator VkDisplayPlaneProperties2KHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDisplayPlaneProperties2KHR*>( this );
}
operator VkDisplayPlaneProperties2KHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDisplayPlaneProperties2KHR*>( this );
}
operator VkDisplayPlaneProperties2KHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDisplayPlaneProperties2KHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, displayPlaneProperties );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DisplayPlaneProperties2KHR const & ) const = default;
#else
bool operator==( DisplayPlaneProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( displayPlaneProperties == rhs.displayPlaneProperties );
#endif
}
bool operator!=( DisplayPlaneProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneProperties2KHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR displayPlaneProperties = {};
};
template <>
struct CppType<StructureType, StructureType::eDisplayPlaneProperties2KHR>
{
using Type = DisplayPlaneProperties2KHR;
};
// wrapper struct for struct VkDisplayPowerInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayPowerInfoEXT.html
struct DisplayPowerInfoEXT
{
using NativeType = VkDisplayPowerInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPowerInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DisplayPowerInfoEXT(VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState_ = VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT::eOff, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, powerState{ powerState_ }
{}
VULKAN_HPP_CONSTEXPR DisplayPowerInfoEXT( DisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DisplayPowerInfoEXT( VkDisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: DisplayPowerInfoEXT( *reinterpret_cast<DisplayPowerInfoEXT const *>( &rhs ) )
{}
DisplayPowerInfoEXT & operator=( DisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DisplayPowerInfoEXT & operator=( VkDisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DisplayPowerInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DisplayPowerInfoEXT & setPowerState( VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState_ ) VULKAN_HPP_NOEXCEPT
{
powerState = powerState_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDisplayPowerInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDisplayPowerInfoEXT*>( this );
}
operator VkDisplayPowerInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDisplayPowerInfoEXT*>( this );
}
operator VkDisplayPowerInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDisplayPowerInfoEXT*>( this );
}
operator VkDisplayPowerInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDisplayPowerInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, powerState );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DisplayPowerInfoEXT const & ) const = default;
#else
bool operator==( DisplayPowerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( powerState == rhs.powerState );
#endif
}
bool operator!=( DisplayPowerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPowerInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState = VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT::eOff;
};
template <>
struct CppType<StructureType, StructureType::eDisplayPowerInfoEXT>
{
using Type = DisplayPowerInfoEXT;
};
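// A usage sketch (illustrative): with VK_EXT_display_control enabled, this struct is passed to
// Device::displayPowerControlEXT to change the power state of a display.
//   device.displayPowerControlEXT( display, vk::DisplayPowerInfoEXT( vk::DisplayPowerStateEXT::eOn ) );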
// wrapper struct for struct VkDisplayPresentInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayPresentInfoKHR.html
struct DisplayPresentInfoKHR
{
using NativeType = VkDisplayPresentInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPresentInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DisplayPresentInfoKHR(VULKAN_HPP_NAMESPACE::Rect2D srcRect_ = {}, VULKAN_HPP_NAMESPACE::Rect2D dstRect_ = {}, VULKAN_HPP_NAMESPACE::Bool32 persistent_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, srcRect{ srcRect_ }, dstRect{ dstRect_ }, persistent{ persistent_ }
{}
VULKAN_HPP_CONSTEXPR DisplayPresentInfoKHR( DisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DisplayPresentInfoKHR( VkDisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: DisplayPresentInfoKHR( *reinterpret_cast<DisplayPresentInfoKHR const *>( &rhs ) )
{}
DisplayPresentInfoKHR & operator=( DisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DisplayPresentInfoKHR & operator=( VkDisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPresentInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DisplayPresentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DisplayPresentInfoKHR & setSrcRect( VULKAN_HPP_NAMESPACE::Rect2D const & srcRect_ ) VULKAN_HPP_NOEXCEPT
{
srcRect = srcRect_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DisplayPresentInfoKHR & setDstRect( VULKAN_HPP_NAMESPACE::Rect2D const & dstRect_ ) VULKAN_HPP_NOEXCEPT
{
dstRect = dstRect_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DisplayPresentInfoKHR & setPersistent( VULKAN_HPP_NAMESPACE::Bool32 persistent_ ) VULKAN_HPP_NOEXCEPT
{
persistent = persistent_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDisplayPresentInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDisplayPresentInfoKHR*>( this );
}
operator VkDisplayPresentInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDisplayPresentInfoKHR*>( this );
}
operator VkDisplayPresentInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDisplayPresentInfoKHR*>( this );
}
operator VkDisplayPresentInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDisplayPresentInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Rect2D const &, VULKAN_HPP_NAMESPACE::Rect2D const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, srcRect, dstRect, persistent );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DisplayPresentInfoKHR const & ) const = default;
#else
bool operator==( DisplayPresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( srcRect == rhs.srcRect )
&& ( dstRect == rhs.dstRect )
&& ( persistent == rhs.persistent );
#endif
}
bool operator!=( DisplayPresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPresentInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Rect2D srcRect = {};
VULKAN_HPP_NAMESPACE::Rect2D dstRect = {};
VULKAN_HPP_NAMESPACE::Bool32 persistent = {};
};
template <>
struct CppType<StructureType, StructureType::eDisplayPresentInfoKHR>
{
using Type = DisplayPresentInfoKHR;
};
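// A usage sketch (illustrative): DisplayPresentInfoKHR extends PresentInfoKHR, so it is chained
// into the present call rather than used on its own; a StructureChain keeps the pNext link valid.
//   vk::StructureChain<vk::PresentInfoKHR, vk::DisplayPresentInfoKHR> chain( presentInfo, displayPresentInfo );
//   queue.presentKHR( chain.get<vk::PresentInfoKHR>() );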
// wrapper struct for struct VkDisplayPropertiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayPropertiesKHR.html
struct DisplayPropertiesKHR
{
using NativeType = VkDisplayPropertiesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DisplayPropertiesKHR(VULKAN_HPP_NAMESPACE::DisplayKHR display_ = {}, const char * displayName_ = {}, VULKAN_HPP_NAMESPACE::Extent2D physicalDimensions_ = {}, VULKAN_HPP_NAMESPACE::Extent2D physicalResolution_ = {}, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {}, VULKAN_HPP_NAMESPACE::Bool32 planeReorderPossible_ = {}, VULKAN_HPP_NAMESPACE::Bool32 persistentContent_ = {}) VULKAN_HPP_NOEXCEPT
: display{ display_ }, displayName{ displayName_ }, physicalDimensions{ physicalDimensions_ }, physicalResolution{ physicalResolution_ }, supportedTransforms{ supportedTransforms_ }, planeReorderPossible{ planeReorderPossible_ }, persistentContent{ persistentContent_ }
{}
VULKAN_HPP_CONSTEXPR DisplayPropertiesKHR( DisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DisplayPropertiesKHR( VkDisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: DisplayPropertiesKHR( *reinterpret_cast<DisplayPropertiesKHR const *>( &rhs ) )
{}
DisplayPropertiesKHR & operator=( DisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DisplayPropertiesKHR & operator=( VkDisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR const *>( &rhs );
return *this;
}
operator VkDisplayPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDisplayPropertiesKHR*>( this );
}
operator VkDisplayPropertiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDisplayPropertiesKHR*>( this );
}
operator VkDisplayPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDisplayPropertiesKHR*>( this );
}
operator VkDisplayPropertiesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDisplayPropertiesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::DisplayKHR const &, const char * const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( display, displayName, physicalDimensions, physicalResolution, supportedTransforms, planeReorderPossible, persistentContent );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
std::strong_ordering operator<=>( DisplayPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = display <=> rhs.display; cmp != 0 ) return cmp;
if ( displayName != rhs.displayName )
{
// displayName may be NULL for an unnamed display (allowed by the spec), so only call strcmp when both sides are non-null
if ( !displayName || !rhs.displayName )
return displayName ? std::strong_ordering::greater : std::strong_ordering::less;
if ( auto cmp = strcmp( displayName, rhs.displayName ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
}
if ( auto cmp = physicalDimensions <=> rhs.physicalDimensions; cmp != 0 ) return cmp;
if ( auto cmp = physicalResolution <=> rhs.physicalResolution; cmp != 0 ) return cmp;
if ( auto cmp = supportedTransforms <=> rhs.supportedTransforms; cmp != 0 ) return cmp;
if ( auto cmp = planeReorderPossible <=> rhs.planeReorderPossible; cmp != 0 ) return cmp;
if ( auto cmp = persistentContent <=> rhs.persistentContent; cmp != 0 ) return cmp;
return std::strong_ordering::equivalent;
}
#endif
bool operator==( DisplayPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( display == rhs.display )
&& ( ( displayName == rhs.displayName ) || ( displayName && rhs.displayName && ( strcmp( displayName, rhs.displayName ) == 0 ) ) )
&& ( physicalDimensions == rhs.physicalDimensions )
&& ( physicalResolution == rhs.physicalResolution )
&& ( supportedTransforms == rhs.supportedTransforms )
&& ( planeReorderPossible == rhs.planeReorderPossible )
&& ( persistentContent == rhs.persistentContent );
}
bool operator!=( DisplayPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
VULKAN_HPP_NAMESPACE::DisplayKHR display = {};
const char * displayName = {};
VULKAN_HPP_NAMESPACE::Extent2D physicalDimensions = {};
VULKAN_HPP_NAMESPACE::Extent2D physicalResolution = {};
VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {};
VULKAN_HPP_NAMESPACE::Bool32 planeReorderPossible = {};
VULKAN_HPP_NAMESPACE::Bool32 persistentContent = {};
};
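// A usage sketch (illustrative): instances are returned by PhysicalDevice::getDisplayPropertiesKHR;
// note that displayName may be NULL for an unnamed display, which the comparisons above guard for.
//   std::vector<vk::DisplayPropertiesKHR> displays = physicalDevice.getDisplayPropertiesKHR();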
// wrapper struct for struct VkDisplayProperties2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayProperties2KHR.html
struct DisplayProperties2KHR
{
using NativeType = VkDisplayProperties2KHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayProperties2KHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DisplayProperties2KHR(VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR displayProperties_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, displayProperties{ displayProperties_ }
{}
VULKAN_HPP_CONSTEXPR DisplayProperties2KHR( DisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DisplayProperties2KHR( VkDisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
: DisplayProperties2KHR( *reinterpret_cast<DisplayProperties2KHR const *>( &rhs ) )
{}
DisplayProperties2KHR & operator=( DisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DisplayProperties2KHR & operator=( VkDisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR const *>( &rhs );
return *this;
}
operator VkDisplayProperties2KHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDisplayProperties2KHR*>( this );
}
operator VkDisplayProperties2KHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDisplayProperties2KHR*>( this );
}
operator VkDisplayProperties2KHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDisplayProperties2KHR*>( this );
}
operator VkDisplayProperties2KHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDisplayProperties2KHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, displayProperties );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DisplayProperties2KHR const & ) const = default;
#else
bool operator==( DisplayProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( displayProperties == rhs.displayProperties );
#endif
}
bool operator!=( DisplayProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayProperties2KHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR displayProperties = {};
};
template <>
struct CppType<StructureType, StructureType::eDisplayProperties2KHR>
{
using Type = DisplayProperties2KHR;
};
// wrapper struct for struct VkDisplaySurfaceCreateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplaySurfaceCreateInfoKHR.html
struct DisplaySurfaceCreateInfoKHR
{
using NativeType = VkDisplaySurfaceCreateInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplaySurfaceCreateInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DisplaySurfaceCreateInfoKHR(VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags_ = {}, VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ = {}, uint32_t planeIndex_ = {}, uint32_t planeStackIndex_ = {}, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, float globalAlpha_ = {}, VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode_ = VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR::eOpaque, VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, displayMode{ displayMode_ }, planeIndex{ planeIndex_ }, planeStackIndex{ planeStackIndex_ }, transform{ transform_ }, globalAlpha{ globalAlpha_ }, alphaMode{ alphaMode_ }, imageExtent{ imageExtent_ }
{}
VULKAN_HPP_CONSTEXPR DisplaySurfaceCreateInfoKHR( DisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DisplaySurfaceCreateInfoKHR( VkDisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: DisplaySurfaceCreateInfoKHR( *reinterpret_cast<DisplaySurfaceCreateInfoKHR const *>( &rhs ) )
{}
DisplaySurfaceCreateInfoKHR & operator=( DisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DisplaySurfaceCreateInfoKHR & operator=( VkDisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setDisplayMode( VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ ) VULKAN_HPP_NOEXCEPT
{
displayMode = displayMode_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setPlaneIndex( uint32_t planeIndex_ ) VULKAN_HPP_NOEXCEPT
{
planeIndex = planeIndex_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setPlaneStackIndex( uint32_t planeStackIndex_ ) VULKAN_HPP_NOEXCEPT
{
planeStackIndex = planeStackIndex_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT
{
transform = transform_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setGlobalAlpha( float globalAlpha_ ) VULKAN_HPP_NOEXCEPT
{
globalAlpha = globalAlpha_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setAlphaMode( VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode_ ) VULKAN_HPP_NOEXCEPT
{
alphaMode = alphaMode_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setImageExtent( VULKAN_HPP_NAMESPACE::Extent2D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT
{
imageExtent = imageExtent_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDisplaySurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR*>( this );
}
operator VkDisplaySurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDisplaySurfaceCreateInfoKHR*>( this );
}
operator VkDisplaySurfaceCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR*>( this );
}
operator VkDisplaySurfaceCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDisplaySurfaceCreateInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR const &, VULKAN_HPP_NAMESPACE::DisplayModeKHR const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR const &, float const &, VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR const &, VULKAN_HPP_NAMESPACE::Extent2D const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, displayMode, planeIndex, planeStackIndex, transform, globalAlpha, alphaMode, imageExtent );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DisplaySurfaceCreateInfoKHR const & ) const = default;
#else
bool operator==( DisplaySurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( displayMode == rhs.displayMode )
&& ( planeIndex == rhs.planeIndex )
&& ( planeStackIndex == rhs.planeStackIndex )
&& ( transform == rhs.transform )
&& ( globalAlpha == rhs.globalAlpha )
&& ( alphaMode == rhs.alphaMode )
&& ( imageExtent == rhs.imageExtent );
#endif
}
bool operator!=( DisplaySurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplaySurfaceCreateInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags = {};
VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode = {};
uint32_t planeIndex = {};
uint32_t planeStackIndex = {};
VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
float globalAlpha = {};
VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode = VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR::eOpaque;
VULKAN_HPP_NAMESPACE::Extent2D imageExtent = {};
};
template <>
struct CppType<StructureType, StructureType::eDisplaySurfaceCreateInfoKHR>
{
using Type = DisplaySurfaceCreateInfoKHR;
};
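// Illustrative usage sketch (not part of the generated header; assumes the default "vk" namespace and an
// existing vk::Instance `instance`): the fluent setters above let a create-info be assembled inline and
// passed to Instance::createDisplayPlaneSurfaceKHR from VK_KHR_display. `displayMode` and the extent are
// placeholder values an application would query via getDisplayModePropertiesKHR and the display-plane
// capability queries.
//
//   vk::SurfaceKHR surface = instance.createDisplayPlaneSurfaceKHR(
//     vk::DisplaySurfaceCreateInfoKHR{}
//       .setDisplayMode( displayMode )
//       .setPlaneIndex( 0 )
//       .setPlaneStackIndex( 0 )
//       .setTransform( vk::SurfaceTransformFlagBitsKHR::eIdentity )
//       .setGlobalAlpha( 1.0f )
//       .setAlphaMode( vk::DisplayPlaneAlphaFlagBitsKHR::eOpaque )
//       .setImageExtent( vk::Extent2D{ 1920, 1080 } ) );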
// wrapper struct for struct VkDisplaySurfaceStereoCreateInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplaySurfaceStereoCreateInfoNV.html
struct DisplaySurfaceStereoCreateInfoNV
{
using NativeType = VkDisplaySurfaceStereoCreateInfoNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplaySurfaceStereoCreateInfoNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DisplaySurfaceStereoCreateInfoNV(VULKAN_HPP_NAMESPACE::DisplaySurfaceStereoTypeNV stereoType_ = VULKAN_HPP_NAMESPACE::DisplaySurfaceStereoTypeNV::eNone, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, stereoType{ stereoType_ }
{}
VULKAN_HPP_CONSTEXPR DisplaySurfaceStereoCreateInfoNV( DisplaySurfaceStereoCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DisplaySurfaceStereoCreateInfoNV( VkDisplaySurfaceStereoCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: DisplaySurfaceStereoCreateInfoNV( *reinterpret_cast<DisplaySurfaceStereoCreateInfoNV const *>( &rhs ) )
{}
DisplaySurfaceStereoCreateInfoNV & operator=( DisplaySurfaceStereoCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DisplaySurfaceStereoCreateInfoNV & operator=( VkDisplaySurfaceStereoCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplaySurfaceStereoCreateInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceStereoCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceStereoCreateInfoNV & setStereoType( VULKAN_HPP_NAMESPACE::DisplaySurfaceStereoTypeNV stereoType_ ) VULKAN_HPP_NOEXCEPT
{
stereoType = stereoType_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDisplaySurfaceStereoCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDisplaySurfaceStereoCreateInfoNV*>( this );
}
operator VkDisplaySurfaceStereoCreateInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDisplaySurfaceStereoCreateInfoNV*>( this );
}
operator VkDisplaySurfaceStereoCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDisplaySurfaceStereoCreateInfoNV*>( this );
}
operator VkDisplaySurfaceStereoCreateInfoNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDisplaySurfaceStereoCreateInfoNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DisplaySurfaceStereoTypeNV const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, stereoType );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DisplaySurfaceStereoCreateInfoNV const & ) const = default;
#else
bool operator==( DisplaySurfaceStereoCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( stereoType == rhs.stereoType );
#endif
}
bool operator!=( DisplaySurfaceStereoCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplaySurfaceStereoCreateInfoNV;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DisplaySurfaceStereoTypeNV stereoType = VULKAN_HPP_NAMESPACE::DisplaySurfaceStereoTypeNV::eNone;
};
template <>
struct CppType<StructureType, StructureType::eDisplaySurfaceStereoCreateInfoNV>
{
using Type = DisplaySurfaceStereoCreateInfoNV;
};
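// Illustrative usage sketch (assumption: VK_NV_display_stereo is available): this structure extends
// DisplaySurfaceCreateInfoKHR through the pNext chain, which vulkan.hpp expresses with a StructureChain;
// `stereoType` stands in for whatever value the application selects for its display path.
//
//   vk::StructureChain<vk::DisplaySurfaceCreateInfoKHR, vk::DisplaySurfaceStereoCreateInfoNV> chain;
//   chain.get<vk::DisplaySurfaceStereoCreateInfoNV>().setStereoType( stereoType );
//   vk::SurfaceKHR surface = instance.createDisplayPlaneSurfaceKHR( chain.get<vk::DisplaySurfaceCreateInfoKHR>() );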
// wrapper struct for struct VkDrawIndexedIndirectCommand, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDrawIndexedIndirectCommand.html
struct DrawIndexedIndirectCommand
{
using NativeType = VkDrawIndexedIndirectCommand;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DrawIndexedIndirectCommand(uint32_t indexCount_ = {}, uint32_t instanceCount_ = {}, uint32_t firstIndex_ = {}, int32_t vertexOffset_ = {}, uint32_t firstInstance_ = {}) VULKAN_HPP_NOEXCEPT
: indexCount{ indexCount_ }, instanceCount{ instanceCount_ }, firstIndex{ firstIndex_ }, vertexOffset{ vertexOffset_ }, firstInstance{ firstInstance_ }
{}
VULKAN_HPP_CONSTEXPR DrawIndexedIndirectCommand( DrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DrawIndexedIndirectCommand( VkDrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
: DrawIndexedIndirectCommand( *reinterpret_cast<DrawIndexedIndirectCommand const *>( &rhs ) )
{}
DrawIndexedIndirectCommand & operator=( DrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DrawIndexedIndirectCommand & operator=( VkDrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrawIndexedIndirectCommand const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand & setIndexCount( uint32_t indexCount_ ) VULKAN_HPP_NOEXCEPT
{
indexCount = indexCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT
{
instanceCount = instanceCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand & setFirstIndex( uint32_t firstIndex_ ) VULKAN_HPP_NOEXCEPT
{
firstIndex = firstIndex_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand & setVertexOffset( int32_t vertexOffset_ ) VULKAN_HPP_NOEXCEPT
{
vertexOffset = vertexOffset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand & setFirstInstance( uint32_t firstInstance_ ) VULKAN_HPP_NOEXCEPT
{
firstInstance = firstInstance_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDrawIndexedIndirectCommand const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDrawIndexedIndirectCommand*>( this );
}
operator VkDrawIndexedIndirectCommand &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDrawIndexedIndirectCommand*>( this );
}
operator VkDrawIndexedIndirectCommand const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDrawIndexedIndirectCommand*>( this );
}
operator VkDrawIndexedIndirectCommand *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDrawIndexedIndirectCommand*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<uint32_t const &, uint32_t const &, uint32_t const &, int32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( indexCount, instanceCount, firstIndex, vertexOffset, firstInstance );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DrawIndexedIndirectCommand const & ) const = default;
#else
bool operator==( DrawIndexedIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( indexCount == rhs.indexCount )
&& ( instanceCount == rhs.instanceCount )
&& ( firstIndex == rhs.firstIndex )
&& ( vertexOffset == rhs.vertexOffset )
&& ( firstInstance == rhs.firstInstance );
#endif
}
bool operator!=( DrawIndexedIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
uint32_t indexCount = {};
uint32_t instanceCount = {};
uint32_t firstIndex = {};
int32_t vertexOffset = {};
uint32_t firstInstance = {};
};
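// Illustrative usage sketch: DrawIndexedIndirectCommand is layout-compatible with the C struct consumed by
// vkCmdDrawIndexedIndirect (the reinterpret_cast conversions above rely on exactly that), so commands can be
// written straight into a mapped indirect buffer. `mapped` is a hypothetical pointer from Device::mapMemory,
// `indirectBuffer` the vk::Buffer bound to that memory, and `commandBuffer` a recording vk::CommandBuffer.
//
//   auto * commands = static_cast<vk::DrawIndexedIndirectCommand *>( mapped );
//   commands[0]     = vk::DrawIndexedIndirectCommand{}
//                       .setIndexCount( 36 )        // e.g. one indexed cube
//                       .setInstanceCount( 1 );
//   commandBuffer.drawIndexedIndirect( indirectBuffer, 0, 1, sizeof( vk::DrawIndexedIndirectCommand ) );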
// wrapper struct for struct VkDrawIndirectCommand, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDrawIndirectCommand.html
struct DrawIndirectCommand
{
using NativeType = VkDrawIndirectCommand;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DrawIndirectCommand(uint32_t vertexCount_ = {}, uint32_t instanceCount_ = {}, uint32_t firstVertex_ = {}, uint32_t firstInstance_ = {}) VULKAN_HPP_NOEXCEPT
: vertexCount{ vertexCount_ }, instanceCount{ instanceCount_ }, firstVertex{ firstVertex_ }, firstInstance{ firstInstance_ }
{}
VULKAN_HPP_CONSTEXPR DrawIndirectCommand( DrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DrawIndirectCommand( VkDrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
: DrawIndirectCommand( *reinterpret_cast<DrawIndirectCommand const *>( &rhs ) )
{}
DrawIndirectCommand & operator=( DrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DrawIndirectCommand & operator=( VkDrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrawIndirectCommand const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DrawIndirectCommand & setVertexCount( uint32_t vertexCount_ ) VULKAN_HPP_NOEXCEPT
{
vertexCount = vertexCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DrawIndirectCommand & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT
{
instanceCount = instanceCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DrawIndirectCommand & setFirstVertex( uint32_t firstVertex_ ) VULKAN_HPP_NOEXCEPT
{
firstVertex = firstVertex_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DrawIndirectCommand & setFirstInstance( uint32_t firstInstance_ ) VULKAN_HPP_NOEXCEPT
{
firstInstance = firstInstance_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDrawIndirectCommand const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDrawIndirectCommand*>( this );
}
operator VkDrawIndirectCommand &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDrawIndirectCommand*>( this );
}
operator VkDrawIndirectCommand const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDrawIndirectCommand*>( this );
}
operator VkDrawIndirectCommand *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDrawIndirectCommand*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( vertexCount, instanceCount, firstVertex, firstInstance );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DrawIndirectCommand const & ) const = default;
#else
bool operator==( DrawIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( vertexCount == rhs.vertexCount )
&& ( instanceCount == rhs.instanceCount )
&& ( firstVertex == rhs.firstVertex )
&& ( firstInstance == rhs.firstInstance );
#endif
}
bool operator!=( DrawIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
uint32_t vertexCount = {};
uint32_t instanceCount = {};
uint32_t firstVertex = {};
uint32_t firstInstance = {};
};
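// Illustrative usage sketch: the non-indexed counterpart of DrawIndexedIndirectCommand above; the same
// mapped-buffer pattern applies, with the buffer consumed by CommandBuffer::drawIndirect.
//
//   static_cast<vk::DrawIndirectCommand *>( mapped )[0] =
//     vk::DrawIndirectCommand{}.setVertexCount( 3 ).setInstanceCount( 1 );   // one un-instanced triangle
//   commandBuffer.drawIndirect( indirectBuffer, 0, 1, sizeof( vk::DrawIndirectCommand ) );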
// wrapper struct for struct VkDrawIndirectCountIndirectCommandEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDrawIndirectCountIndirectCommandEXT.html
struct DrawIndirectCountIndirectCommandEXT
{
using NativeType = VkDrawIndirectCountIndirectCommandEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DrawIndirectCountIndirectCommandEXT(VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ = {}, uint32_t stride_ = {}, uint32_t commandCount_ = {}) VULKAN_HPP_NOEXCEPT
: bufferAddress{ bufferAddress_ }, stride{ stride_ }, commandCount{ commandCount_ }
{}
VULKAN_HPP_CONSTEXPR DrawIndirectCountIndirectCommandEXT( DrawIndirectCountIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DrawIndirectCountIndirectCommandEXT( VkDrawIndirectCountIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: DrawIndirectCountIndirectCommandEXT( *reinterpret_cast<DrawIndirectCountIndirectCommandEXT const *>( &rhs ) )
{}
DrawIndirectCountIndirectCommandEXT & operator=( DrawIndirectCountIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DrawIndirectCountIndirectCommandEXT & operator=( VkDrawIndirectCountIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrawIndirectCountIndirectCommandEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DrawIndirectCountIndirectCommandEXT & setBufferAddress( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ ) VULKAN_HPP_NOEXCEPT
{
bufferAddress = bufferAddress_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DrawIndirectCountIndirectCommandEXT & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT
{
stride = stride_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DrawIndirectCountIndirectCommandEXT & setCommandCount( uint32_t commandCount_ ) VULKAN_HPP_NOEXCEPT
{
commandCount = commandCount_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDrawIndirectCountIndirectCommandEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDrawIndirectCountIndirectCommandEXT*>( this );
}
operator VkDrawIndirectCountIndirectCommandEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDrawIndirectCountIndirectCommandEXT*>( this );
}
operator VkDrawIndirectCountIndirectCommandEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDrawIndirectCountIndirectCommandEXT*>( this );
}
operator VkDrawIndirectCountIndirectCommandEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDrawIndirectCountIndirectCommandEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::DeviceAddress const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( bufferAddress, stride, commandCount );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DrawIndirectCountIndirectCommandEXT const & ) const = default;
#else
bool operator==( DrawIndirectCountIndirectCommandEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( bufferAddress == rhs.bufferAddress )
&& ( stride == rhs.stride )
&& ( commandCount == rhs.commandCount );
#endif
}
bool operator!=( DrawIndirectCountIndirectCommandEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress = {};
uint32_t stride = {};
uint32_t commandCount = {};
};
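// Illustrative usage sketch (assumption: VK_EXT_device_generated_commands): this command parameterizes a
// draw-count token in a device-generated command stream, addressing the array of indirect draws by device
// address (e.g. obtained via Device::getBufferAddress) rather than by buffer handle. `drawBufferAddress`
// and `maxDrawCount` are hypothetical application values.
//
//   vk::DrawIndirectCountIndirectCommandEXT countCommand = vk::DrawIndirectCountIndirectCommandEXT{}
//     .setBufferAddress( drawBufferAddress )
//     .setStride( sizeof( vk::DrawIndirectCommand ) )
//     .setCommandCount( maxDrawCount );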
// wrapper struct for struct VkDrawMeshTasksIndirectCommandEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDrawMeshTasksIndirectCommandEXT.html
struct DrawMeshTasksIndirectCommandEXT
{
using NativeType = VkDrawMeshTasksIndirectCommandEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DrawMeshTasksIndirectCommandEXT(uint32_t groupCountX_ = {}, uint32_t groupCountY_ = {}, uint32_t groupCountZ_ = {}) VULKAN_HPP_NOEXCEPT
: groupCountX{ groupCountX_ }, groupCountY{ groupCountY_ }, groupCountZ{ groupCountZ_ }
{}
VULKAN_HPP_CONSTEXPR DrawMeshTasksIndirectCommandEXT( DrawMeshTasksIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DrawMeshTasksIndirectCommandEXT( VkDrawMeshTasksIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: DrawMeshTasksIndirectCommandEXT( *reinterpret_cast<DrawMeshTasksIndirectCommandEXT const *>( &rhs ) )
{}
DrawMeshTasksIndirectCommandEXT & operator=( DrawMeshTasksIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DrawMeshTasksIndirectCommandEXT & operator=( VkDrawMeshTasksIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DrawMeshTasksIndirectCommandEXT & setGroupCountX( uint32_t groupCountX_ ) VULKAN_HPP_NOEXCEPT
{
groupCountX = groupCountX_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DrawMeshTasksIndirectCommandEXT & setGroupCountY( uint32_t groupCountY_ ) VULKAN_HPP_NOEXCEPT
{
groupCountY = groupCountY_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DrawMeshTasksIndirectCommandEXT & setGroupCountZ( uint32_t groupCountZ_ ) VULKAN_HPP_NOEXCEPT
{
groupCountZ = groupCountZ_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDrawMeshTasksIndirectCommandEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDrawMeshTasksIndirectCommandEXT*>( this );
}
operator VkDrawMeshTasksIndirectCommandEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDrawMeshTasksIndirectCommandEXT*>( this );
}
operator VkDrawMeshTasksIndirectCommandEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDrawMeshTasksIndirectCommandEXT*>( this );
}
operator VkDrawMeshTasksIndirectCommandEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDrawMeshTasksIndirectCommandEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<uint32_t const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( groupCountX, groupCountY, groupCountZ );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DrawMeshTasksIndirectCommandEXT const & ) const = default;
#else
bool operator==( DrawMeshTasksIndirectCommandEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( groupCountX == rhs.groupCountX )
&& ( groupCountY == rhs.groupCountY )
&& ( groupCountZ == rhs.groupCountZ );
#endif
}
bool operator!=( DrawMeshTasksIndirectCommandEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
uint32_t groupCountX = {};
uint32_t groupCountY = {};
uint32_t groupCountZ = {};
};
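// Illustrative usage sketch (assumption: VK_EXT_mesh_shader): the three group counts mirror the arguments of
// CommandBuffer::drawMeshTasksEXT; written into a buffer (same hypothetical `mapped` / `indirectBuffer` /
// `commandBuffer` as in the sketches above), they drive CommandBuffer::drawMeshTasksIndirectEXT.
//
//   static_cast<vk::DrawMeshTasksIndirectCommandEXT *>( mapped )[0] =
//     vk::DrawMeshTasksIndirectCommandEXT{ 64, 1, 1 };       // 64 workgroups on X
//   commandBuffer.drawMeshTasksIndirectEXT( indirectBuffer, 0, 1, sizeof( vk::DrawMeshTasksIndirectCommandEXT ) );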
// wrapper struct for struct VkDrawMeshTasksIndirectCommandNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDrawMeshTasksIndirectCommandNV.html
struct DrawMeshTasksIndirectCommandNV
{
using NativeType = VkDrawMeshTasksIndirectCommandNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DrawMeshTasksIndirectCommandNV(uint32_t taskCount_ = {}, uint32_t firstTask_ = {}) VULKAN_HPP_NOEXCEPT
: taskCount{ taskCount_ }, firstTask{ firstTask_ }
{}
VULKAN_HPP_CONSTEXPR DrawMeshTasksIndirectCommandNV( DrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DrawMeshTasksIndirectCommandNV( VkDrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
: DrawMeshTasksIndirectCommandNV( *reinterpret_cast<DrawMeshTasksIndirectCommandNV const *>( &rhs ) )
{}
DrawMeshTasksIndirectCommandNV & operator=( DrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DrawMeshTasksIndirectCommandNV & operator=( VkDrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DrawMeshTasksIndirectCommandNV & setTaskCount( uint32_t taskCount_ ) VULKAN_HPP_NOEXCEPT
{
taskCount = taskCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DrawMeshTasksIndirectCommandNV & setFirstTask( uint32_t firstTask_ ) VULKAN_HPP_NOEXCEPT
{
firstTask = firstTask_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkDrawMeshTasksIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDrawMeshTasksIndirectCommandNV*>( this );
}
operator VkDrawMeshTasksIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDrawMeshTasksIndirectCommandNV*>( this );
}
operator VkDrawMeshTasksIndirectCommandNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDrawMeshTasksIndirectCommandNV*>( this );
}
operator VkDrawMeshTasksIndirectCommandNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDrawMeshTasksIndirectCommandNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( taskCount, firstTask );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DrawMeshTasksIndirectCommandNV const & ) const = default;
#else
bool operator==( DrawMeshTasksIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( taskCount == rhs.taskCount )
&& ( firstTask == rhs.firstTask );
#endif
}
bool operator!=( DrawMeshTasksIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
uint32_t taskCount = {};
uint32_t firstTask = {};
};
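// Note: the older VK_NV_mesh_shader analogue of the EXT command above; taskCount / firstTask correspond to
// the arguments of CommandBuffer::drawMeshTasksNV, and the buffer is consumed by
// CommandBuffer::drawMeshTasksIndirectNV with the same mapped-buffer pattern.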
// wrapper struct for struct VkDrmFormatModifierProperties2EXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDrmFormatModifierProperties2EXT.html
struct DrmFormatModifierProperties2EXT
{
using NativeType = VkDrmFormatModifierProperties2EXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DrmFormatModifierProperties2EXT(uint64_t drmFormatModifier_ = {}, uint32_t drmFormatModifierPlaneCount_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 drmFormatModifierTilingFeatures_ = {}) VULKAN_HPP_NOEXCEPT
: drmFormatModifier{ drmFormatModifier_ }, drmFormatModifierPlaneCount{ drmFormatModifierPlaneCount_ }, drmFormatModifierTilingFeatures{ drmFormatModifierTilingFeatures_ }
{}
VULKAN_HPP_CONSTEXPR DrmFormatModifierProperties2EXT( DrmFormatModifierProperties2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DrmFormatModifierProperties2EXT( VkDrmFormatModifierProperties2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
: DrmFormatModifierProperties2EXT( *reinterpret_cast<DrmFormatModifierProperties2EXT const *>( &rhs ) )
{}
DrmFormatModifierProperties2EXT & operator=( DrmFormatModifierProperties2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DrmFormatModifierProperties2EXT & operator=( VkDrmFormatModifierProperties2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT const *>( &rhs );
return *this;
}
operator VkDrmFormatModifierProperties2EXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDrmFormatModifierProperties2EXT*>( this );
}
operator VkDrmFormatModifierProperties2EXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDrmFormatModifierProperties2EXT*>( this );
}
operator VkDrmFormatModifierProperties2EXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDrmFormatModifierProperties2EXT*>( this );
}
operator VkDrmFormatModifierProperties2EXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDrmFormatModifierProperties2EXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<uint64_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( drmFormatModifier, drmFormatModifierPlaneCount, drmFormatModifierTilingFeatures );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DrmFormatModifierProperties2EXT const & ) const = default;
#else
bool operator==( DrmFormatModifierProperties2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( drmFormatModifier == rhs.drmFormatModifier )
&& ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount )
&& ( drmFormatModifierTilingFeatures == rhs.drmFormatModifierTilingFeatures );
#endif
}
bool operator!=( DrmFormatModifierProperties2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
uint64_t drmFormatModifier = {};
uint32_t drmFormatModifierPlaneCount = {};
VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 drmFormatModifierTilingFeatures = {};
};
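// Note: this is a returned-only structure (filled in by the implementation), which is why the generator
// emits no setter block for it; the query that populates it is sketched with DrmFormatModifierPropertiesList2EXT
// below.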
// wrapper struct for struct VkDrmFormatModifierPropertiesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDrmFormatModifierPropertiesEXT.html
struct DrmFormatModifierPropertiesEXT
{
using NativeType = VkDrmFormatModifierPropertiesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesEXT(uint64_t drmFormatModifier_ = {}, uint32_t drmFormatModifierPlaneCount_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags drmFormatModifierTilingFeatures_ = {}) VULKAN_HPP_NOEXCEPT
: drmFormatModifier{ drmFormatModifier_ }, drmFormatModifierPlaneCount{ drmFormatModifierPlaneCount_ }, drmFormatModifierTilingFeatures{ drmFormatModifierTilingFeatures_ }
{}
VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesEXT( DrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DrmFormatModifierPropertiesEXT( VkDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: DrmFormatModifierPropertiesEXT( *reinterpret_cast<DrmFormatModifierPropertiesEXT const *>( &rhs ) )
{}
DrmFormatModifierPropertiesEXT & operator=( DrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DrmFormatModifierPropertiesEXT & operator=( VkDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT const *>( &rhs );
return *this;
}
operator VkDrmFormatModifierPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDrmFormatModifierPropertiesEXT*>( this );
}
operator VkDrmFormatModifierPropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDrmFormatModifierPropertiesEXT*>( this );
}
operator VkDrmFormatModifierPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDrmFormatModifierPropertiesEXT*>( this );
}
operator VkDrmFormatModifierPropertiesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDrmFormatModifierPropertiesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<uint64_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::FormatFeatureFlags const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( drmFormatModifier, drmFormatModifierPlaneCount, drmFormatModifierTilingFeatures );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DrmFormatModifierPropertiesEXT const & ) const = default;
#else
bool operator==( DrmFormatModifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( drmFormatModifier == rhs.drmFormatModifier )
&& ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount )
&& ( drmFormatModifierTilingFeatures == rhs.drmFormatModifierTilingFeatures );
#endif
}
bool operator!=( DrmFormatModifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
uint64_t drmFormatModifier = {};
uint32_t drmFormatModifierPlaneCount = {};
VULKAN_HPP_NAMESPACE::FormatFeatureFlags drmFormatModifierTilingFeatures = {};
};
// wrapper struct for struct VkDrmFormatModifierPropertiesList2EXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDrmFormatModifierPropertiesList2EXT.html
struct DrmFormatModifierPropertiesList2EXT
{
using NativeType = VkDrmFormatModifierPropertiesList2EXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDrmFormatModifierPropertiesList2EXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesList2EXT(uint32_t drmFormatModifierCount_ = {}, VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT * pDrmFormatModifierProperties_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, drmFormatModifierCount{ drmFormatModifierCount_ }, pDrmFormatModifierProperties{ pDrmFormatModifierProperties_ }
{}
VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesList2EXT( DrmFormatModifierPropertiesList2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DrmFormatModifierPropertiesList2EXT( VkDrmFormatModifierPropertiesList2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
: DrmFormatModifierPropertiesList2EXT( *reinterpret_cast<DrmFormatModifierPropertiesList2EXT const *>( &rhs ) )
{}
DrmFormatModifierPropertiesList2EXT & operator=( DrmFormatModifierPropertiesList2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DrmFormatModifierPropertiesList2EXT & operator=( VkDrmFormatModifierPropertiesList2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesList2EXT const *>( &rhs );
return *this;
}
operator VkDrmFormatModifierPropertiesList2EXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDrmFormatModifierPropertiesList2EXT*>( this );
}
operator VkDrmFormatModifierPropertiesList2EXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDrmFormatModifierPropertiesList2EXT*>( this );
}
operator VkDrmFormatModifierPropertiesList2EXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDrmFormatModifierPropertiesList2EXT*>( this );
}
operator VkDrmFormatModifierPropertiesList2EXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDrmFormatModifierPropertiesList2EXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, drmFormatModifierCount, pDrmFormatModifierProperties );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DrmFormatModifierPropertiesList2EXT const & ) const = default;
#else
bool operator==( DrmFormatModifierPropertiesList2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( drmFormatModifierCount == rhs.drmFormatModifierCount )
&& ( pDrmFormatModifierProperties == rhs.pDrmFormatModifierProperties );
#endif
}
bool operator!=( DrmFormatModifierPropertiesList2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDrmFormatModifierPropertiesList2EXT;
void * pNext = {};
uint32_t drmFormatModifierCount = {};
VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT * pDrmFormatModifierProperties = {};
};
template <>
struct CppType<StructureType, StructureType::eDrmFormatModifierPropertiesList2EXT>
{
using Type = DrmFormatModifierPropertiesList2EXT;
};
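// Illustrative usage sketch: like most returned-only lists, this follows the two-call enumeration pattern,
// chained into FormatProperties2 via PhysicalDevice::getFormatProperties2. `physicalDevice` and `format` are
// assumed to exist; pNext and pDrmFormatModifierProperties are public members and can be wired up directly.
//
//   vk::DrmFormatModifierPropertiesList2EXT modifierList;                    // 1st call: count only
//   vk::FormatProperties2 formatProperties;
//   formatProperties.pNext = &modifierList;
//   physicalDevice.getFormatProperties2( format, &formatProperties );
//   std::vector<vk::DrmFormatModifierProperties2EXT> modifiers( modifierList.drmFormatModifierCount );
//   modifierList.pDrmFormatModifierProperties = modifiers.data();
//   physicalDevice.getFormatProperties2( format, &formatProperties );        // 2nd call: fill the array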
// wrapper struct for struct VkDrmFormatModifierPropertiesListEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDrmFormatModifierPropertiesListEXT.html
struct DrmFormatModifierPropertiesListEXT
{
using NativeType = VkDrmFormatModifierPropertiesListEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDrmFormatModifierPropertiesListEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesListEXT(uint32_t drmFormatModifierCount_ = {}, VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT * pDrmFormatModifierProperties_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, drmFormatModifierCount{ drmFormatModifierCount_ }, pDrmFormatModifierProperties{ pDrmFormatModifierProperties_ }
{}
VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesListEXT( DrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DrmFormatModifierPropertiesListEXT( VkDrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: DrmFormatModifierPropertiesListEXT( *reinterpret_cast<DrmFormatModifierPropertiesListEXT const *>( &rhs ) )
{}
DrmFormatModifierPropertiesListEXT & operator=( DrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
DrmFormatModifierPropertiesListEXT & operator=( VkDrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT const *>( &rhs );
return *this;
}
operator VkDrmFormatModifierPropertiesListEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDrmFormatModifierPropertiesListEXT*>( this );
}
operator VkDrmFormatModifierPropertiesListEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDrmFormatModifierPropertiesListEXT*>( this );
}
operator VkDrmFormatModifierPropertiesListEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkDrmFormatModifierPropertiesListEXT*>( this );
}
operator VkDrmFormatModifierPropertiesListEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkDrmFormatModifierPropertiesListEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, drmFormatModifierCount, pDrmFormatModifierProperties );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( DrmFormatModifierPropertiesListEXT const & ) const = default;
#else
bool operator==( DrmFormatModifierPropertiesListEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( drmFormatModifierCount == rhs.drmFormatModifierCount )
&& ( pDrmFormatModifierProperties == rhs.pDrmFormatModifierProperties );
#endif
}
bool operator!=( DrmFormatModifierPropertiesListEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDrmFormatModifierPropertiesListEXT;
void * pNext = {};
uint32_t drmFormatModifierCount = {};
VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT * pDrmFormatModifierProperties = {};
};
template <>
struct CppType<StructureType, StructureType::eDrmFormatModifierPropertiesListEXT>
{
using Type = DrmFormatModifierPropertiesListEXT;
};
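// Note: the original (non-"2") variant of the list above; the query pattern is identical, but per-modifier
// tiling features are reported as 32-bit FormatFeatureFlags rather than FormatFeatureFlags2.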
// wrapper struct for struct VkEventCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkEventCreateInfo.html
struct EventCreateInfo
{
using NativeType = VkEventCreateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eEventCreateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR EventCreateInfo(VULKAN_HPP_NAMESPACE::EventCreateFlags flags_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }
{}
VULKAN_HPP_CONSTEXPR EventCreateInfo( EventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
EventCreateInfo( VkEventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: EventCreateInfo( *reinterpret_cast<EventCreateInfo const *>( &rhs ) )
{}
EventCreateInfo & operator=( EventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
EventCreateInfo & operator=( VkEventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::EventCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 EventCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 EventCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::EventCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkEventCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkEventCreateInfo*>( this );
}
operator VkEventCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkEventCreateInfo*>( this );
}
operator VkEventCreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkEventCreateInfo*>( this );
}
operator VkEventCreateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkEventCreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::EventCreateFlags const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( EventCreateInfo const & ) const = default;
#else
bool operator==( EventCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags );
#endif
}
bool operator!=( EventCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eEventCreateInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::EventCreateFlags flags = {};
};
template <>
struct CppType<StructureType, StructureType::eEventCreateInfo>
{
using Type = EventCreateInfo;
};
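// Illustrative usage sketch: events are created from this structure via Device::createEvent and can then be
// signalled and polled from the host with Device::setEvent / Device::getEventStatus (or from command buffers
// via the event commands). `device` is an assumed vk::Device.
//
//   vk::Event event = device.createEvent( vk::EventCreateInfo{} );
//   device.setEvent( event );
//   bool signaled   = ( device.getEventStatus( event ) == vk::Result::eEventSet );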
// wrapper struct for struct VkPipelineLibraryCreateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineLibraryCreateInfoKHR.html
struct PipelineLibraryCreateInfoKHR
{
using NativeType = VkPipelineLibraryCreateInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineLibraryCreateInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineLibraryCreateInfoKHR(uint32_t libraryCount_ = {}, const VULKAN_HPP_NAMESPACE::Pipeline * pLibraries_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, libraryCount{ libraryCount_ }, pLibraries{ pLibraries_ }
{}
VULKAN_HPP_CONSTEXPR PipelineLibraryCreateInfoKHR( PipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineLibraryCreateInfoKHR( VkPipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineLibraryCreateInfoKHR( *reinterpret_cast<PipelineLibraryCreateInfoKHR const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineLibraryCreateInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Pipeline> const & libraries_, const void * pNext_ = nullptr )
: pNext( pNext_ ), libraryCount( static_cast<uint32_t>( libraries_.size() ) ), pLibraries( libraries_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
PipelineLibraryCreateInfoKHR & operator=( PipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineLibraryCreateInfoKHR & operator=( VkPipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PipelineLibraryCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineLibraryCreateInfoKHR & setLibraryCount( uint32_t libraryCount_ ) VULKAN_HPP_NOEXCEPT
{
libraryCount = libraryCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineLibraryCreateInfoKHR & setPLibraries( const VULKAN_HPP_NAMESPACE::Pipeline * pLibraries_ ) VULKAN_HPP_NOEXCEPT
{
pLibraries = pLibraries_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineLibraryCreateInfoKHR & setLibraries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Pipeline> const & libraries_ ) VULKAN_HPP_NOEXCEPT
{
libraryCount = static_cast<uint32_t>( libraries_.size() );
pLibraries = libraries_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPipelineLibraryCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineLibraryCreateInfoKHR*>( this );
}
operator VkPipelineLibraryCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineLibraryCreateInfoKHR*>( this );
}
operator VkPipelineLibraryCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineLibraryCreateInfoKHR*>( this );
}
operator VkPipelineLibraryCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineLibraryCreateInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Pipeline * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, libraryCount, pLibraries );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PipelineLibraryCreateInfoKHR const & ) const = default;
#else
bool operator==( PipelineLibraryCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( libraryCount == rhs.libraryCount )
&& ( pLibraries == rhs.pLibraries );
#endif
}
bool operator!=( PipelineLibraryCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineLibraryCreateInfoKHR;
const void * pNext = {};
uint32_t libraryCount = {};
const VULKAN_HPP_NAMESPACE::Pipeline * pLibraries = {};
};
template <>
struct CppType<StructureType, StructureType::ePipelineLibraryCreateInfoKHR>
{
using Type = PipelineLibraryCreateInfoKHR;
};
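// Illustrative usage sketch: the ArrayProxy constructor and setLibraries above keep libraryCount and
// pLibraries in sync automatically. `vertexLibrary` / `fragmentLibrary` are hypothetical pipeline-library
// handles, i.e. pipelines created with vk::PipelineCreateFlagBits::eLibraryKHR.
//
//   std::array<vk::Pipeline, 2> libraries = { vertexLibrary, fragmentLibrary };
//   vk::PipelineLibraryCreateInfoKHR libraryInfo( libraries );               // count deduced from the proxy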
#if defined( VK_ENABLE_BETA_EXTENSIONS )
// wrapper struct for struct VkExecutionGraphPipelineCreateInfoAMDX, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExecutionGraphPipelineCreateInfoAMDX.html
struct ExecutionGraphPipelineCreateInfoAMDX
{
using NativeType = VkExecutionGraphPipelineCreateInfoAMDX;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExecutionGraphPipelineCreateInfoAMDX;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ExecutionGraphPipelineCreateInfoAMDX(VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, uint32_t stageCount_ = {}, const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ = {}, const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, stageCount{ stageCount_ }, pStages{ pStages_ }, pLibraryInfo{ pLibraryInfo_ }, layout{ layout_ }, basePipelineHandle{ basePipelineHandle_ }, basePipelineIndex{ basePipelineIndex_ }
{}
VULKAN_HPP_CONSTEXPR ExecutionGraphPipelineCreateInfoAMDX( ExecutionGraphPipelineCreateInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExecutionGraphPipelineCreateInfoAMDX( VkExecutionGraphPipelineCreateInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT
: ExecutionGraphPipelineCreateInfoAMDX( *reinterpret_cast<ExecutionGraphPipelineCreateInfoAMDX const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
ExecutionGraphPipelineCreateInfoAMDX( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_, const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {}, const void * pNext_ = nullptr )
: pNext( pNext_ ), flags( flags_ ), stageCount( static_cast<uint32_t>( stages_.size() ) ), pStages( stages_.data() ), pLibraryInfo( pLibraryInfo_ ), layout( layout_ ), basePipelineHandle( basePipelineHandle_ ), basePipelineIndex( basePipelineIndex_ )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
ExecutionGraphPipelineCreateInfoAMDX & operator=( ExecutionGraphPipelineCreateInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ExecutionGraphPipelineCreateInfoAMDX & operator=( VkExecutionGraphPipelineCreateInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT
{
stageCount = stageCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX & setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT
{
pStages = pStages_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
ExecutionGraphPipelineCreateInfoAMDX & setStages( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_ ) VULKAN_HPP_NOEXCEPT
{
stageCount = static_cast<uint32_t>( stages_.size() );
pStages = stages_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX & setPLibraryInfo( const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo_ ) VULKAN_HPP_NOEXCEPT
{
pLibraryInfo = pLibraryInfo_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
{
layout = layout_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
{
basePipelineHandle = basePipelineHandle_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT
{
basePipelineIndex = basePipelineIndex_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkExecutionGraphPipelineCreateInfoAMDX const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX*>( this );
}
operator VkExecutionGraphPipelineCreateInfoAMDX &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkExecutionGraphPipelineCreateInfoAMDX*>( this );
}
operator VkExecutionGraphPipelineCreateInfoAMDX const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX*>( this );
}
operator VkExecutionGraphPipelineCreateInfoAMDX *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkExecutionGraphPipelineCreateInfoAMDX*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineCreateFlags const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * const &, const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * const &, VULKAN_HPP_NAMESPACE::PipelineLayout const &, VULKAN_HPP_NAMESPACE::Pipeline const &, int32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, stageCount, pStages, pLibraryInfo, layout, basePipelineHandle, basePipelineIndex );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ExecutionGraphPipelineCreateInfoAMDX const & ) const = default;
#else
bool operator==( ExecutionGraphPipelineCreateInfoAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( stageCount == rhs.stageCount )
&& ( pStages == rhs.pStages )
&& ( pLibraryInfo == rhs.pLibraryInfo )
&& ( layout == rhs.layout )
&& ( basePipelineHandle == rhs.basePipelineHandle )
&& ( basePipelineIndex == rhs.basePipelineIndex );
#endif
}
bool operator!=( ExecutionGraphPipelineCreateInfoAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExecutionGraphPipelineCreateInfoAMDX;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {};
uint32_t stageCount = {};
const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages = {};
const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo = {};
VULKAN_HPP_NAMESPACE::PipelineLayout layout = {};
VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {};
int32_t basePipelineIndex = {};
};
template <>
struct CppType<StructureType, StructureType::eExecutionGraphPipelineCreateInfoAMDX>
{
using Type = ExecutionGraphPipelineCreateInfoAMDX;
};
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
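// Illustrative usage sketch (assumption: VK_ENABLE_BETA_EXTENSIONS and the provisional VK_AMDX_shader_enqueue
// extension): an execution-graph pipeline is assembled much like a compute pipeline, from shader stages plus
// an optional PipelineLibraryCreateInfoKHR; `stages` and `pipelineLayout` are assumed application objects.
// The creation call is hedged: the exact overload of Device::createExecutionGraphPipelinesAMDX depends on
// the configured result/exception handling.
//
//   vk::ExecutionGraphPipelineCreateInfoAMDX graphInfo( {}, stages, nullptr, pipelineLayout );
//   // auto pipelines = device.createExecutionGraphPipelinesAMDX( {}, pipelineCache, graphInfo );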
#if defined( VK_ENABLE_BETA_EXTENSIONS )
// wrapper struct for struct VkExecutionGraphPipelineScratchSizeAMDX, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExecutionGraphPipelineScratchSizeAMDX.html
struct ExecutionGraphPipelineScratchSizeAMDX
{
using NativeType = VkExecutionGraphPipelineScratchSizeAMDX;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExecutionGraphPipelineScratchSizeAMDX;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ExecutionGraphPipelineScratchSizeAMDX(VULKAN_HPP_NAMESPACE::DeviceSize minSize_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize maxSize_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize sizeGranularity_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, minSize{ minSize_ }, maxSize{ maxSize_ }, sizeGranularity{ sizeGranularity_ }
{}
VULKAN_HPP_CONSTEXPR ExecutionGraphPipelineScratchSizeAMDX( ExecutionGraphPipelineScratchSizeAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExecutionGraphPipelineScratchSizeAMDX( VkExecutionGraphPipelineScratchSizeAMDX const & rhs ) VULKAN_HPP_NOEXCEPT
: ExecutionGraphPipelineScratchSizeAMDX( *reinterpret_cast<ExecutionGraphPipelineScratchSizeAMDX const *>( &rhs ) )
{}
ExecutionGraphPipelineScratchSizeAMDX & operator=( ExecutionGraphPipelineScratchSizeAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ExecutionGraphPipelineScratchSizeAMDX & operator=( VkExecutionGraphPipelineScratchSizeAMDX const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineScratchSizeAMDX & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineScratchSizeAMDX & setMinSize( VULKAN_HPP_NAMESPACE::DeviceSize minSize_ ) VULKAN_HPP_NOEXCEPT
{
minSize = minSize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineScratchSizeAMDX & setMaxSize( VULKAN_HPP_NAMESPACE::DeviceSize maxSize_ ) VULKAN_HPP_NOEXCEPT
{
maxSize = maxSize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineScratchSizeAMDX & setSizeGranularity( VULKAN_HPP_NAMESPACE::DeviceSize sizeGranularity_ ) VULKAN_HPP_NOEXCEPT
{
sizeGranularity = sizeGranularity_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkExecutionGraphPipelineScratchSizeAMDX const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkExecutionGraphPipelineScratchSizeAMDX*>( this );
}
operator VkExecutionGraphPipelineScratchSizeAMDX &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkExecutionGraphPipelineScratchSizeAMDX*>( this );
}
operator VkExecutionGraphPipelineScratchSizeAMDX const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkExecutionGraphPipelineScratchSizeAMDX*>( this );
}
operator VkExecutionGraphPipelineScratchSizeAMDX *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkExecutionGraphPipelineScratchSizeAMDX*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, minSize, maxSize, sizeGranularity );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ExecutionGraphPipelineScratchSizeAMDX const & ) const = default;
#else
bool operator==( ExecutionGraphPipelineScratchSizeAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( minSize == rhs.minSize )
&& ( maxSize == rhs.maxSize )
&& ( sizeGranularity == rhs.sizeGranularity );
#endif
}
bool operator!=( ExecutionGraphPipelineScratchSizeAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExecutionGraphPipelineScratchSizeAMDX;
void * pNext = {};
VULKAN_HPP_NAMESPACE::DeviceSize minSize = {};
VULKAN_HPP_NAMESPACE::DeviceSize maxSize = {};
VULKAN_HPP_NAMESPACE::DeviceSize sizeGranularity = {};
};
template <>
struct CppType<StructureType, StructureType::eExecutionGraphPipelineScratchSizeAMDX>
{
using Type = ExecutionGraphPipelineScratchSizeAMDX;
};
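// A minimal usage sketch (hedged): once an execution graph pipeline has been compiled,
// the required scratch allocation can be queried via the beta entry point
// vkGetExecutionGraphPipelineScratchSizeAMDX; 'device' and 'pipeline' are assumed to
// exist. Any allocation size in [minSize, maxSize] aligned to sizeGranularity is valid:
//   vk::ExecutionGraphPipelineScratchSizeAMDX sizeInfo =
//       device.getExecutionGraphPipelineScratchSizeAMDX( pipeline );
//   vk::DeviceSize scratchSize = sizeInfo.minSize;  // smallest valid allocation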
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
// wrapper struct for struct VkExportFenceCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportFenceCreateInfo.html
struct ExportFenceCreateInfo
{
using NativeType = VkExportFenceCreateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportFenceCreateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ExportFenceCreateInfo(VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, handleTypes{ handleTypes_ }
{}
VULKAN_HPP_CONSTEXPR ExportFenceCreateInfo( ExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExportFenceCreateInfo( VkExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: ExportFenceCreateInfo( *reinterpret_cast<ExportFenceCreateInfo const *>( &rhs ) )
{}
ExportFenceCreateInfo & operator=( ExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ExportFenceCreateInfo & operator=( VkExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ExportFenceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ExportFenceCreateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
{
handleTypes = handleTypes_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkExportFenceCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkExportFenceCreateInfo*>( this );
}
operator VkExportFenceCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkExportFenceCreateInfo*>( this );
}
operator VkExportFenceCreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkExportFenceCreateInfo*>( this );
}
operator VkExportFenceCreateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkExportFenceCreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, handleTypes );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ExportFenceCreateInfo const & ) const = default;
#else
bool operator==( ExportFenceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( handleTypes == rhs.handleTypes );
#endif
}
bool operator!=( ExportFenceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportFenceCreateInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes = {};
};
template <>
struct CppType<StructureType, StructureType::eExportFenceCreateInfo>
{
using Type = ExportFenceCreateInfo;
};
using ExportFenceCreateInfoKHR = ExportFenceCreateInfo;
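// A minimal usage sketch (assumes a device with external-fence support): chain
// ExportFenceCreateInfo behind FenceCreateInfo so the fence's payload can later be
// exported, here as an opaque POSIX file descriptor. vk::StructureChain links the
// pNext pointers automatically:
//   vk::StructureChain<vk::FenceCreateInfo, vk::ExportFenceCreateInfo> chain{
//       vk::FenceCreateInfo{},
//       vk::ExportFenceCreateInfo{ vk::ExternalFenceHandleTypeFlagBits::eOpaqueFd } };
//   vk::Fence fence = device.createFence( chain.get<vk::FenceCreateInfo>() );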
#if defined( VK_USE_PLATFORM_WIN32_KHR )
// wrapper struct for struct VkExportFenceWin32HandleInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportFenceWin32HandleInfoKHR.html
struct ExportFenceWin32HandleInfoKHR
{
using NativeType = VkExportFenceWin32HandleInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportFenceWin32HandleInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ExportFenceWin32HandleInfoKHR(const SECURITY_ATTRIBUTES * pAttributes_ = {}, DWORD dwAccess_ = {}, LPCWSTR name_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pAttributes{ pAttributes_ }, dwAccess{ dwAccess_ }, name{ name_ }
{}
VULKAN_HPP_CONSTEXPR ExportFenceWin32HandleInfoKHR( ExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExportFenceWin32HandleInfoKHR( VkExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: ExportFenceWin32HandleInfoKHR( *reinterpret_cast<ExportFenceWin32HandleInfoKHR const *>( &rhs ) )
{}
ExportFenceWin32HandleInfoKHR & operator=( ExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ExportFenceWin32HandleInfoKHR & operator=( VkExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportFenceWin32HandleInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ExportFenceWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ExportFenceWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) VULKAN_HPP_NOEXCEPT
{
pAttributes = pAttributes_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ExportFenceWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT
{
dwAccess = dwAccess_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ExportFenceWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
{
name = name_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkExportFenceWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkExportFenceWin32HandleInfoKHR*>( this );
}
operator VkExportFenceWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkExportFenceWin32HandleInfoKHR*>( this );
}
operator VkExportFenceWin32HandleInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkExportFenceWin32HandleInfoKHR*>( this );
}
operator VkExportFenceWin32HandleInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkExportFenceWin32HandleInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const SECURITY_ATTRIBUTES * const &, DWORD const &, LPCWSTR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pAttributes, dwAccess, name );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ExportFenceWin32HandleInfoKHR const & ) const = default;
#else
bool operator==( ExportFenceWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pAttributes == rhs.pAttributes )
&& ( dwAccess == rhs.dwAccess )
&& ( name == rhs.name );
#endif
}
bool operator!=( ExportFenceWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportFenceWin32HandleInfoKHR;
const void * pNext = {};
const SECURITY_ATTRIBUTES * pAttributes = {};
DWORD dwAccess = {};
LPCWSTR name = {};
};
template <>
struct CppType<StructureType, StructureType::eExportFenceWin32HandleInfoKHR>
{
using Type = ExportFenceWin32HandleInfoKHR;
};
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
// wrapper struct for struct VkExportMemoryAllocateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportMemoryAllocateInfo.html
struct ExportMemoryAllocateInfo
{
using NativeType = VkExportMemoryAllocateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryAllocateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfo(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, handleTypes{ handleTypes_ }
{}
VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfo( ExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExportMemoryAllocateInfo( VkExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: ExportMemoryAllocateInfo( *reinterpret_cast<ExportMemoryAllocateInfo const *>( &rhs ) )
{}
ExportMemoryAllocateInfo & operator=( ExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ExportMemoryAllocateInfo & operator=( VkExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ExportMemoryAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ExportMemoryAllocateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
{
handleTypes = handleTypes_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkExportMemoryAllocateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkExportMemoryAllocateInfo*>( this );
}
operator VkExportMemoryAllocateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkExportMemoryAllocateInfo*>( this );
}
operator VkExportMemoryAllocateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkExportMemoryAllocateInfo*>( this );
}
operator VkExportMemoryAllocateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkExportMemoryAllocateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, handleTypes );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ExportMemoryAllocateInfo const & ) const = default;
#else
bool operator==( ExportMemoryAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( handleTypes == rhs.handleTypes );
#endif
}
bool operator!=( ExportMemoryAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryAllocateInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {};
};
template <>
struct CppType<StructureType, StructureType::eExportMemoryAllocateInfo>
{
using Type = ExportMemoryAllocateInfo;
};
using ExportMemoryAllocateInfoKHR = ExportMemoryAllocateInfo;
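// A minimal usage sketch (hedged; 'allocationSize' and 'memoryTypeIndex' come from the
// usual memory-requirements and memory-properties queries): request an exportable
// allocation by chaining ExportMemoryAllocateInfo behind MemoryAllocateInfo:
//   vk::StructureChain<vk::MemoryAllocateInfo, vk::ExportMemoryAllocateInfo> chain{
//       vk::MemoryAllocateInfo{ allocationSize, memoryTypeIndex },
//       vk::ExportMemoryAllocateInfo{ vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd } };
//   vk::DeviceMemory memory = device.allocateMemory( chain.get<vk::MemoryAllocateInfo>() );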
// wrapper struct for struct VkExportMemoryAllocateInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportMemoryAllocateInfoNV.html
struct ExportMemoryAllocateInfoNV
{
using NativeType = VkExportMemoryAllocateInfoNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryAllocateInfoNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfoNV(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, handleTypes{ handleTypes_ }
{}
VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfoNV( ExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExportMemoryAllocateInfoNV( VkExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: ExportMemoryAllocateInfoNV( *reinterpret_cast<ExportMemoryAllocateInfoNV const *>( &rhs ) )
{}
ExportMemoryAllocateInfoNV & operator=( ExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ExportMemoryAllocateInfoNV & operator=( VkExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ExportMemoryAllocateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ExportMemoryAllocateInfoNV & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ ) VULKAN_HPP_NOEXCEPT
{
handleTypes = handleTypes_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkExportMemoryAllocateInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkExportMemoryAllocateInfoNV*>( this );
}
operator VkExportMemoryAllocateInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkExportMemoryAllocateInfoNV*>( this );
}
operator VkExportMemoryAllocateInfoNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkExportMemoryAllocateInfoNV*>( this );
}
operator VkExportMemoryAllocateInfoNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkExportMemoryAllocateInfoNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, handleTypes );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ExportMemoryAllocateInfoNV const & ) const = default;
#else
bool operator==( ExportMemoryAllocateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( handleTypes == rhs.handleTypes );
#endif
}
bool operator!=( ExportMemoryAllocateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryAllocateInfoNV;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes = {};
};
template <>
struct CppType<StructureType, StructureType::eExportMemoryAllocateInfoNV>
{
using Type = ExportMemoryAllocateInfoNV;
};
#if defined( VK_USE_PLATFORM_WIN32_KHR )
// wrapper struct for struct VkExportMemoryWin32HandleInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportMemoryWin32HandleInfoKHR.html
struct ExportMemoryWin32HandleInfoKHR
{
using NativeType = VkExportMemoryWin32HandleInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryWin32HandleInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoKHR(const SECURITY_ATTRIBUTES * pAttributes_ = {}, DWORD dwAccess_ = {}, LPCWSTR name_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pAttributes{ pAttributes_ }, dwAccess{ dwAccess_ }, name{ name_ }
{}
VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoKHR( ExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExportMemoryWin32HandleInfoKHR( VkExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: ExportMemoryWin32HandleInfoKHR( *reinterpret_cast<ExportMemoryWin32HandleInfoKHR const *>( &rhs ) )
{}
ExportMemoryWin32HandleInfoKHR & operator=( ExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ExportMemoryWin32HandleInfoKHR & operator=( VkExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) VULKAN_HPP_NOEXCEPT
{
pAttributes = pAttributes_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT
{
dwAccess = dwAccess_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
{
name = name_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkExportMemoryWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkExportMemoryWin32HandleInfoKHR*>( this );
}
operator VkExportMemoryWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkExportMemoryWin32HandleInfoKHR*>( this );
}
operator VkExportMemoryWin32HandleInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkExportMemoryWin32HandleInfoKHR*>( this );
}
operator VkExportMemoryWin32HandleInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkExportMemoryWin32HandleInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const SECURITY_ATTRIBUTES * const &, DWORD const &, LPCWSTR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pAttributes, dwAccess, name );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ExportMemoryWin32HandleInfoKHR const & ) const = default;
#else
bool operator==( ExportMemoryWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pAttributes == rhs.pAttributes )
&& ( dwAccess == rhs.dwAccess )
&& ( name == rhs.name );
#endif
}
bool operator!=( ExportMemoryWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryWin32HandleInfoKHR;
const void * pNext = {};
const SECURITY_ATTRIBUTES * pAttributes = {};
DWORD dwAccess = {};
LPCWSTR name = {};
};
template <>
struct CppType<StructureType, StructureType::eExportMemoryWin32HandleInfoKHR>
{
using Type = ExportMemoryWin32HandleInfoKHR;
};
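// A hedged Win32 sketch: when exporting with a Win32 handle type, this struct is
// appended behind ExportMemoryAllocateInfo in the same pNext chain to control the
// created handle's security and access rights ('securityAttributes' is an illustrative
// SECURITY_ATTRIBUTES instance; GENERIC_ALL is the generic Win32 access mask):
//   vk::ExportMemoryWin32HandleInfoKHR win32Info{};
//   win32Info.setPAttributes( &securityAttributes )
//            .setDwAccess( GENERIC_ALL )
//            .setName( L"Local\\MySharedAllocation" );  // optional named handle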
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#if defined( VK_USE_PLATFORM_WIN32_KHR )
// wrapper struct for struct VkExportMemoryWin32HandleInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportMemoryWin32HandleInfoNV.html
struct ExportMemoryWin32HandleInfoNV
{
using NativeType = VkExportMemoryWin32HandleInfoNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryWin32HandleInfoNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoNV(const SECURITY_ATTRIBUTES * pAttributes_ = {}, DWORD dwAccess_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pAttributes{ pAttributes_ }, dwAccess{ dwAccess_ }
{}
VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoNV( ExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExportMemoryWin32HandleInfoNV( VkExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: ExportMemoryWin32HandleInfoNV( *reinterpret_cast<ExportMemoryWin32HandleInfoNV const *>( &rhs ) )
{}
ExportMemoryWin32HandleInfoNV & operator=( ExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ExportMemoryWin32HandleInfoNV & operator=( VkExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoNV & setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) VULKAN_HPP_NOEXCEPT
{
pAttributes = pAttributes_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoNV & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT
{
dwAccess = dwAccess_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkExportMemoryWin32HandleInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkExportMemoryWin32HandleInfoNV*>( this );
}
operator VkExportMemoryWin32HandleInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkExportMemoryWin32HandleInfoNV*>( this );
}
operator VkExportMemoryWin32HandleInfoNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkExportMemoryWin32HandleInfoNV*>( this );
}
operator VkExportMemoryWin32HandleInfoNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkExportMemoryWin32HandleInfoNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const SECURITY_ATTRIBUTES * const &, DWORD const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pAttributes, dwAccess );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ExportMemoryWin32HandleInfoNV const & ) const = default;
#else
bool operator==( ExportMemoryWin32HandleInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pAttributes == rhs.pAttributes )
&& ( dwAccess == rhs.dwAccess );
#endif
}
bool operator!=( ExportMemoryWin32HandleInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryWin32HandleInfoNV;
const void * pNext = {};
const SECURITY_ATTRIBUTES * pAttributes = {};
DWORD dwAccess = {};
};
template <>
struct CppType<StructureType, StructureType::eExportMemoryWin32HandleInfoNV>
{
using Type = ExportMemoryWin32HandleInfoNV;
};
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#if defined( VK_USE_PLATFORM_METAL_EXT )
// wrapper struct for struct VkExportMetalBufferInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportMetalBufferInfoEXT.html
struct ExportMetalBufferInfoEXT
{
using NativeType = VkExportMetalBufferInfoEXT;
static const bool allowDuplicate = true;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalBufferInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ExportMetalBufferInfoEXT(VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, MTLBuffer_id mtlBuffer_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, memory{ memory_ }, mtlBuffer{ mtlBuffer_ }
{}
VULKAN_HPP_CONSTEXPR ExportMetalBufferInfoEXT( ExportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExportMetalBufferInfoEXT( VkExportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: ExportMetalBufferInfoEXT( *reinterpret_cast<ExportMetalBufferInfoEXT const *>( &rhs ) )
{}
ExportMetalBufferInfoEXT & operator=( ExportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ExportMetalBufferInfoEXT & operator=( VkExportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMetalBufferInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ExportMetalBufferInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ExportMetalBufferInfoEXT & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
{
memory = memory_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ExportMetalBufferInfoEXT & setMtlBuffer( MTLBuffer_id mtlBuffer_ ) VULKAN_HPP_NOEXCEPT
{
mtlBuffer = mtlBuffer_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkExportMetalBufferInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkExportMetalBufferInfoEXT*>( this );
}
operator VkExportMetalBufferInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkExportMetalBufferInfoEXT*>( this );
}
operator VkExportMetalBufferInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkExportMetalBufferInfoEXT*>( this );
}
operator VkExportMetalBufferInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkExportMetalBufferInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceMemory const &, MTLBuffer_id const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, memory, mtlBuffer );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ExportMetalBufferInfoEXT const & ) const = default;
#else
bool operator==( ExportMetalBufferInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( memory == rhs.memory )
&& ( mtlBuffer == rhs.mtlBuffer );
#endif
}
bool operator!=( ExportMetalBufferInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMetalBufferInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
MTLBuffer_id mtlBuffer = {};
};
template <>
struct CppType<StructureType, StructureType::eExportMetalBufferInfoEXT>
{
using Type = ExportMetalBufferInfoEXT;
};
#endif /*VK_USE_PLATFORM_METAL_EXT*/
#if defined( VK_USE_PLATFORM_METAL_EXT )
// wrapper struct for struct VkExportMetalCommandQueueInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportMetalCommandQueueInfoEXT.html
struct ExportMetalCommandQueueInfoEXT
{
using NativeType = VkExportMetalCommandQueueInfoEXT;
static const bool allowDuplicate = true;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalCommandQueueInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ExportMetalCommandQueueInfoEXT(VULKAN_HPP_NAMESPACE::Queue queue_ = {}, MTLCommandQueue_id mtlCommandQueue_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, queue{ queue_ }, mtlCommandQueue{ mtlCommandQueue_ }
{}
VULKAN_HPP_CONSTEXPR ExportMetalCommandQueueInfoEXT( ExportMetalCommandQueueInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExportMetalCommandQueueInfoEXT( VkExportMetalCommandQueueInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: ExportMetalCommandQueueInfoEXT( *reinterpret_cast<ExportMetalCommandQueueInfoEXT const *>( &rhs ) )
{}
ExportMetalCommandQueueInfoEXT & operator=( ExportMetalCommandQueueInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ExportMetalCommandQueueInfoEXT & operator=( VkExportMetalCommandQueueInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMetalCommandQueueInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ExportMetalCommandQueueInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ExportMetalCommandQueueInfoEXT & setQueue( VULKAN_HPP_NAMESPACE::Queue queue_ ) VULKAN_HPP_NOEXCEPT
{
queue = queue_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ExportMetalCommandQueueInfoEXT & setMtlCommandQueue( MTLCommandQueue_id mtlCommandQueue_ ) VULKAN_HPP_NOEXCEPT
{
mtlCommandQueue = mtlCommandQueue_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkExportMetalCommandQueueInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkExportMetalCommandQueueInfoEXT*>( this );
}
operator VkExportMetalCommandQueueInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkExportMetalCommandQueueInfoEXT*>( this );
}
operator VkExportMetalCommandQueueInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkExportMetalCommandQueueInfoEXT*>( this );
}
operator VkExportMetalCommandQueueInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkExportMetalCommandQueueInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Queue const &, MTLCommandQueue_id const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, queue, mtlCommandQueue );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ExportMetalCommandQueueInfoEXT const & ) const = default;
#else
bool operator==( ExportMetalCommandQueueInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( queue == rhs.queue )
&& ( mtlCommandQueue == rhs.mtlCommandQueue );
#endif
}
bool operator!=( ExportMetalCommandQueueInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMetalCommandQueueInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Queue queue = {};
MTLCommandQueue_id mtlCommandQueue = {};
};
template <>
struct CppType<StructureType, StructureType::eExportMetalCommandQueueInfoEXT>
{
using Type = ExportMetalCommandQueueInfoEXT;
};
#endif /*VK_USE_PLATFORM_METAL_EXT*/
#if defined( VK_USE_PLATFORM_METAL_EXT )
// wrapper struct for struct VkExportMetalDeviceInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportMetalDeviceInfoEXT.html
struct ExportMetalDeviceInfoEXT
{
using NativeType = VkExportMetalDeviceInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalDeviceInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ExportMetalDeviceInfoEXT(MTLDevice_id mtlDevice_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, mtlDevice{ mtlDevice_ }
{}
VULKAN_HPP_CONSTEXPR ExportMetalDeviceInfoEXT( ExportMetalDeviceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExportMetalDeviceInfoEXT( VkExportMetalDeviceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: ExportMetalDeviceInfoEXT( *reinterpret_cast<ExportMetalDeviceInfoEXT const *>( &rhs ) )
{}
ExportMetalDeviceInfoEXT & operator=( ExportMetalDeviceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ExportMetalDeviceInfoEXT & operator=( VkExportMetalDeviceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMetalDeviceInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ExportMetalDeviceInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ExportMetalDeviceInfoEXT & setMtlDevice( MTLDevice_id mtlDevice_ ) VULKAN_HPP_NOEXCEPT
{
mtlDevice = mtlDevice_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkExportMetalDeviceInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkExportMetalDeviceInfoEXT*>( this );
}
operator VkExportMetalDeviceInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkExportMetalDeviceInfoEXT*>( this );
}
operator VkExportMetalDeviceInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkExportMetalDeviceInfoEXT*>( this );
}
operator VkExportMetalDeviceInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkExportMetalDeviceInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, MTLDevice_id const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, mtlDevice );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ExportMetalDeviceInfoEXT const & ) const = default;
#else
bool operator==( ExportMetalDeviceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( mtlDevice == rhs.mtlDevice );
#endif
}
bool operator!=( ExportMetalDeviceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMetalDeviceInfoEXT;
const void * pNext = {};
MTLDevice_id mtlDevice = {};
};
template <>
struct CppType<StructureType, StructureType::eExportMetalDeviceInfoEXT>
{
using Type = ExportMetalDeviceInfoEXT;
};
#endif /*VK_USE_PLATFORM_METAL_EXT*/
#if defined( VK_USE_PLATFORM_METAL_EXT )
// wrapper struct for struct VkExportMetalIOSurfaceInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportMetalIOSurfaceInfoEXT.html
struct ExportMetalIOSurfaceInfoEXT
{
using NativeType = VkExportMetalIOSurfaceInfoEXT;
static const bool allowDuplicate = true;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalIoSurfaceInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ExportMetalIOSurfaceInfoEXT(VULKAN_HPP_NAMESPACE::Image image_ = {}, IOSurfaceRef ioSurface_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, image{ image_ }, ioSurface{ ioSurface_ }
{}
VULKAN_HPP_CONSTEXPR ExportMetalIOSurfaceInfoEXT( ExportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExportMetalIOSurfaceInfoEXT( VkExportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: ExportMetalIOSurfaceInfoEXT( *reinterpret_cast<ExportMetalIOSurfaceInfoEXT const *>( &rhs ) )
{}
ExportMetalIOSurfaceInfoEXT & operator=( ExportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ExportMetalIOSurfaceInfoEXT & operator=( VkExportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMetalIOSurfaceInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ExportMetalIOSurfaceInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ExportMetalIOSurfaceInfoEXT & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
{
image = image_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ExportMetalIOSurfaceInfoEXT & setIoSurface( IOSurfaceRef ioSurface_ ) VULKAN_HPP_NOEXCEPT
{
ioSurface = ioSurface_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkExportMetalIOSurfaceInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkExportMetalIOSurfaceInfoEXT*>( this );
}
operator VkExportMetalIOSurfaceInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkExportMetalIOSurfaceInfoEXT*>( this );
}
operator VkExportMetalIOSurfaceInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkExportMetalIOSurfaceInfoEXT*>( this );
}
operator VkExportMetalIOSurfaceInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkExportMetalIOSurfaceInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Image const &, IOSurfaceRef const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, image, ioSurface );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ExportMetalIOSurfaceInfoEXT const & ) const = default;
#else
bool operator==( ExportMetalIOSurfaceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( image == rhs.image )
&& ( ioSurface == rhs.ioSurface );
#endif
}
bool operator!=( ExportMetalIOSurfaceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMetalIoSurfaceInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Image image = {};
IOSurfaceRef ioSurface = {};
};
template <>
struct CppType<StructureType, StructureType::eExportMetalIoSurfaceInfoEXT>
{
using Type = ExportMetalIOSurfaceInfoEXT;
};
#endif /*VK_USE_PLATFORM_METAL_EXT*/
#if defined( VK_USE_PLATFORM_METAL_EXT )
// wrapper struct for struct VkExportMetalObjectCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportMetalObjectCreateInfoEXT.html
struct ExportMetalObjectCreateInfoEXT
{
using NativeType = VkExportMetalObjectCreateInfoEXT;
static const bool allowDuplicate = true;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalObjectCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ExportMetalObjectCreateInfoEXT(VULKAN_HPP_NAMESPACE::ExportMetalObjectTypeFlagBitsEXT exportObjectType_ = VULKAN_HPP_NAMESPACE::ExportMetalObjectTypeFlagBitsEXT::eMetalDevice, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, exportObjectType{ exportObjectType_ }
{}
VULKAN_HPP_CONSTEXPR ExportMetalObjectCreateInfoEXT( ExportMetalObjectCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExportMetalObjectCreateInfoEXT( VkExportMetalObjectCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: ExportMetalObjectCreateInfoEXT( *reinterpret_cast<ExportMetalObjectCreateInfoEXT const *>( &rhs ) )
{}
ExportMetalObjectCreateInfoEXT & operator=( ExportMetalObjectCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ExportMetalObjectCreateInfoEXT & operator=( VkExportMetalObjectCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMetalObjectCreateInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ExportMetalObjectCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ExportMetalObjectCreateInfoEXT & setExportObjectType( VULKAN_HPP_NAMESPACE::ExportMetalObjectTypeFlagBitsEXT exportObjectType_ ) VULKAN_HPP_NOEXCEPT
{
exportObjectType = exportObjectType_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkExportMetalObjectCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkExportMetalObjectCreateInfoEXT*>( this );
}
operator VkExportMetalObjectCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkExportMetalObjectCreateInfoEXT*>( this );
}
operator VkExportMetalObjectCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkExportMetalObjectCreateInfoEXT*>( this );
}
operator VkExportMetalObjectCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkExportMetalObjectCreateInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExportMetalObjectTypeFlagBitsEXT const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, exportObjectType );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ExportMetalObjectCreateInfoEXT const & ) const = default;
#else
bool operator==( ExportMetalObjectCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( exportObjectType == rhs.exportObjectType );
#endif
}
bool operator!=( ExportMetalObjectCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMetalObjectCreateInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ExportMetalObjectTypeFlagBitsEXT exportObjectType = VULKAN_HPP_NAMESPACE::ExportMetalObjectTypeFlagBitsEXT::eMetalDevice;
};
template <>
struct CppType<StructureType, StructureType::eExportMetalObjectCreateInfoEXT>
{
using Type = ExportMetalObjectCreateInfoEXT;
};
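// A hedged creation-time sketch: per VK_EXT_metal_objects, export must be requested
// up front by chaining this struct into the create info of the object being created,
// e.g. for an image whose MTLTexture is retrieved later ('imageCreateInfo' is an
// illustrative vk::ImageCreateInfo):
//   vk::ExportMetalObjectCreateInfoEXT exportInfo{
//       vk::ExportMetalObjectTypeFlagBitsEXT::eMetalTexture };
//   imageCreateInfo.setPNext( &exportInfo );
//   vk::Image image = device.createImage( imageCreateInfo );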
#endif /*VK_USE_PLATFORM_METAL_EXT*/
#if defined( VK_USE_PLATFORM_METAL_EXT )
// wrapper struct for struct VkExportMetalObjectsInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportMetalObjectsInfoEXT.html
struct ExportMetalObjectsInfoEXT
{
using NativeType = VkExportMetalObjectsInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalObjectsInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ExportMetalObjectsInfoEXT(const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }
{}
VULKAN_HPP_CONSTEXPR ExportMetalObjectsInfoEXT( ExportMetalObjectsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExportMetalObjectsInfoEXT( VkExportMetalObjectsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: ExportMetalObjectsInfoEXT( *reinterpret_cast<ExportMetalObjectsInfoEXT const *>( &rhs ) )
{}
ExportMetalObjectsInfoEXT & operator=( ExportMetalObjectsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ExportMetalObjectsInfoEXT & operator=( VkExportMetalObjectsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ExportMetalObjectsInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkExportMetalObjectsInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkExportMetalObjectsInfoEXT*>( this );
}
operator VkExportMetalObjectsInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkExportMetalObjectsInfoEXT*>( this );
}
operator VkExportMetalObjectsInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkExportMetalObjectsInfoEXT*>( this );
}
operator VkExportMetalObjectsInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkExportMetalObjectsInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ExportMetalObjectsInfoEXT const & ) const = default;
#else
bool operator==( ExportMetalObjectsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext );
#endif
}
bool operator!=( ExportMetalObjectsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMetalObjectsInfoEXT;
const void * pNext = {};
};
template <>
struct CppType<StructureType, StructureType::eExportMetalObjectsInfoEXT>
{
using Type = ExportMetalObjectsInfoEXT;
};
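// A hedged retrieval sketch: ExportMetalObjectsInfoEXT itself carries no payload; the
// Metal handles arrive in ExportMetal*InfoEXT structs chained onto its pNext and filled
// by vkExportMetalObjectsEXT. Assuming the structure-chain overload that vulkan.hpp
// generates for such extensible output structs:
//   auto chain = device.exportMetalObjectsEXT<vk::ExportMetalObjectsInfoEXT,
//                                             vk::ExportMetalDeviceInfoEXT>();
//   MTLDevice_id mtlDevice = chain.get<vk::ExportMetalDeviceInfoEXT>().mtlDevice;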
#endif /*VK_USE_PLATFORM_METAL_EXT*/
#if defined( VK_USE_PLATFORM_METAL_EXT )
// wrapper struct for struct VkExportMetalSharedEventInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportMetalSharedEventInfoEXT.html
struct ExportMetalSharedEventInfoEXT
{
using NativeType = VkExportMetalSharedEventInfoEXT;
static const bool allowDuplicate = true;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalSharedEventInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ExportMetalSharedEventInfoEXT(VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, VULKAN_HPP_NAMESPACE::Event event_ = {}, MTLSharedEvent_id mtlSharedEvent_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, semaphore{ semaphore_ }, event{ event_ }, mtlSharedEvent{ mtlSharedEvent_ }
{}
VULKAN_HPP_CONSTEXPR ExportMetalSharedEventInfoEXT( ExportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExportMetalSharedEventInfoEXT( VkExportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: ExportMetalSharedEventInfoEXT( *reinterpret_cast<ExportMetalSharedEventInfoEXT const *>( &rhs ) )
{}
ExportMetalSharedEventInfoEXT & operator=( ExportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ExportMetalSharedEventInfoEXT & operator=( VkExportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMetalSharedEventInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ExportMetalSharedEventInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ExportMetalSharedEventInfoEXT & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
{
semaphore = semaphore_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ExportMetalSharedEventInfoEXT & setEvent( VULKAN_HPP_NAMESPACE::Event event_ ) VULKAN_HPP_NOEXCEPT
{
event = event_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ExportMetalSharedEventInfoEXT & setMtlSharedEvent( MTLSharedEvent_id mtlSharedEvent_ ) VULKAN_HPP_NOEXCEPT
{
mtlSharedEvent = mtlSharedEvent_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkExportMetalSharedEventInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkExportMetalSharedEventInfoEXT*>( this );
}
operator VkExportMetalSharedEventInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkExportMetalSharedEventInfoEXT*>( this );
}
operator VkExportMetalSharedEventInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkExportMetalSharedEventInfoEXT*>( this );
}
operator VkExportMetalSharedEventInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkExportMetalSharedEventInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Semaphore const &, VULKAN_HPP_NAMESPACE::Event const &, MTLSharedEvent_id const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, semaphore, event, mtlSharedEvent );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ExportMetalSharedEventInfoEXT const & ) const = default;
#else
bool operator==( ExportMetalSharedEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( semaphore == rhs.semaphore )
&& ( event == rhs.event )
&& ( mtlSharedEvent == rhs.mtlSharedEvent );
#endif
}
bool operator!=( ExportMetalSharedEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMetalSharedEventInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
VULKAN_HPP_NAMESPACE::Event event = {};
MTLSharedEvent_id mtlSharedEvent = {};
};
template <>
struct CppType<StructureType, StructureType::eExportMetalSharedEventInfoEXT>
{
using Type = ExportMetalSharedEventInfoEXT;
};
#endif /*VK_USE_PLATFORM_METAL_EXT*/
#if defined( VK_USE_PLATFORM_METAL_EXT )
// wrapper struct for struct VkExportMetalTextureInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportMetalTextureInfoEXT.html
struct ExportMetalTextureInfoEXT
{
using NativeType = VkExportMetalTextureInfoEXT;
static const bool allowDuplicate = true;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalTextureInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ExportMetalTextureInfoEXT(VULKAN_HPP_NAMESPACE::Image image_ = {}, VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, VULKAN_HPP_NAMESPACE::BufferView bufferView_ = {}, VULKAN_HPP_NAMESPACE::ImageAspectFlagBits plane_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor, MTLTexture_id mtlTexture_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, image{ image_ }, imageView{ imageView_ }, bufferView{ bufferView_ }, plane{ plane_ }, mtlTexture{ mtlTexture_ }
{}
VULKAN_HPP_CONSTEXPR ExportMetalTextureInfoEXT( ExportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExportMetalTextureInfoEXT( VkExportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: ExportMetalTextureInfoEXT( *reinterpret_cast<ExportMetalTextureInfoEXT const *>( &rhs ) )
{}
ExportMetalTextureInfoEXT & operator=( ExportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ExportMetalTextureInfoEXT & operator=( VkExportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMetalTextureInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ExportMetalTextureInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ExportMetalTextureInfoEXT & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
{
image = image_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ExportMetalTextureInfoEXT & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
{
imageView = imageView_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ExportMetalTextureInfoEXT & setBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView_ ) VULKAN_HPP_NOEXCEPT
{
bufferView = bufferView_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ExportMetalTextureInfoEXT & setPlane( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits plane_ ) VULKAN_HPP_NOEXCEPT
{
plane = plane_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ExportMetalTextureInfoEXT & setMtlTexture( MTLTexture_id mtlTexture_ ) VULKAN_HPP_NOEXCEPT
{
mtlTexture = mtlTexture_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkExportMetalTextureInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkExportMetalTextureInfoEXT*>( this );
}
operator VkExportMetalTextureInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkExportMetalTextureInfoEXT*>( this );
}
operator VkExportMetalTextureInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkExportMetalTextureInfoEXT*>( this );
}
operator VkExportMetalTextureInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkExportMetalTextureInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Image const &, VULKAN_HPP_NAMESPACE::ImageView const &, VULKAN_HPP_NAMESPACE::BufferView const &, VULKAN_HPP_NAMESPACE::ImageAspectFlagBits const &, MTLTexture_id const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, image, imageView, bufferView, plane, mtlTexture );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ExportMetalTextureInfoEXT const & ) const = default;
#else
bool operator==( ExportMetalTextureInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( image == rhs.image )
&& ( imageView == rhs.imageView )
&& ( bufferView == rhs.bufferView )
&& ( plane == rhs.plane )
&& ( mtlTexture == rhs.mtlTexture );
#endif
}
bool operator!=( ExportMetalTextureInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMetalTextureInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Image image = {};
VULKAN_HPP_NAMESPACE::ImageView imageView = {};
VULKAN_HPP_NAMESPACE::BufferView bufferView = {};
VULKAN_HPP_NAMESPACE::ImageAspectFlagBits plane = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor;
MTLTexture_id mtlTexture = {};
};
template <>
struct CppType<StructureType, StructureType::eExportMetalTextureInfoEXT>
{
using Type = ExportMetalTextureInfoEXT;
};
#endif /*VK_USE_PLATFORM_METAL_EXT*/
// wrapper struct for struct VkExportSemaphoreCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportSemaphoreCreateInfo.html
struct ExportSemaphoreCreateInfo
{
using NativeType = VkExportSemaphoreCreateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportSemaphoreCreateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ExportSemaphoreCreateInfo(VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, handleTypes{ handleTypes_ }
{}
VULKAN_HPP_CONSTEXPR ExportSemaphoreCreateInfo( ExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExportSemaphoreCreateInfo( VkExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: ExportSemaphoreCreateInfo( *reinterpret_cast<ExportSemaphoreCreateInfo const *>( &rhs ) )
{}
ExportSemaphoreCreateInfo & operator=( ExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ExportSemaphoreCreateInfo & operator=( VkExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreCreateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
{
handleTypes = handleTypes_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkExportSemaphoreCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkExportSemaphoreCreateInfo*>( this );
}
operator VkExportSemaphoreCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkExportSemaphoreCreateInfo*>( this );
}
operator VkExportSemaphoreCreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkExportSemaphoreCreateInfo*>( this );
}
operator VkExportSemaphoreCreateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkExportSemaphoreCreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, handleTypes );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ExportSemaphoreCreateInfo const & ) const = default;
#else
bool operator==( ExportSemaphoreCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( handleTypes == rhs.handleTypes );
#endif
}
bool operator!=( ExportSemaphoreCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportSemaphoreCreateInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes = {};
};
template <>
struct CppType<StructureType, StructureType::eExportSemaphoreCreateInfo>
{
using Type = ExportSemaphoreCreateInfo;
};
using ExportSemaphoreCreateInfoKHR = ExportSemaphoreCreateInfo;
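// Usage sketch (illustrative comment only, not part of the generated API; assumes the default
// `vk` namespace alias, a valid vk::Device `device`, and an enabled external-semaphore extension):
//   vk::ExportSemaphoreCreateInfo exportInfo{ vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd };
//   vk::SemaphoreCreateInfo createInfo{};
//   createInfo.setPNext( &exportInfo );
//   vk::Semaphore semaphore = device.createSemaphore( createInfo );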
#if defined( VK_USE_PLATFORM_WIN32_KHR )
// wrapper struct for VkExportSemaphoreWin32HandleInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportSemaphoreWin32HandleInfoKHR.html
struct ExportSemaphoreWin32HandleInfoKHR
{
using NativeType = VkExportSemaphoreWin32HandleInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportSemaphoreWin32HandleInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ExportSemaphoreWin32HandleInfoKHR(const SECURITY_ATTRIBUTES * pAttributes_ = {}, DWORD dwAccess_ = {}, LPCWSTR name_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pAttributes{ pAttributes_ }, dwAccess{ dwAccess_ }, name{ name_ }
{}
VULKAN_HPP_CONSTEXPR ExportSemaphoreWin32HandleInfoKHR( ExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExportSemaphoreWin32HandleInfoKHR( VkExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: ExportSemaphoreWin32HandleInfoKHR( *reinterpret_cast<ExportSemaphoreWin32HandleInfoKHR const *>( &rhs ) )
{}
ExportSemaphoreWin32HandleInfoKHR & operator=( ExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ExportSemaphoreWin32HandleInfoKHR & operator=( VkExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportSemaphoreWin32HandleInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) VULKAN_HPP_NOEXCEPT
{
pAttributes = pAttributes_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT
{
dwAccess = dwAccess_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
{
name = name_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkExportSemaphoreWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkExportSemaphoreWin32HandleInfoKHR*>( this );
}
operator VkExportSemaphoreWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkExportSemaphoreWin32HandleInfoKHR*>( this );
}
operator VkExportSemaphoreWin32HandleInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkExportSemaphoreWin32HandleInfoKHR*>( this );
}
operator VkExportSemaphoreWin32HandleInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkExportSemaphoreWin32HandleInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const SECURITY_ATTRIBUTES * const &, DWORD const &, LPCWSTR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pAttributes, dwAccess, name );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ExportSemaphoreWin32HandleInfoKHR const & ) const = default;
#else
bool operator==( ExportSemaphoreWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pAttributes == rhs.pAttributes )
&& ( dwAccess == rhs.dwAccess )
&& ( name == rhs.name );
#endif
}
bool operator!=( ExportSemaphoreWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportSemaphoreWin32HandleInfoKHR;
const void * pNext = {};
const SECURITY_ATTRIBUTES * pAttributes = {};
DWORD dwAccess = {};
LPCWSTR name = {};
};
template <>
struct CppType<StructureType, StructureType::eExportSemaphoreWin32HandleInfoKHR>
{
using Type = ExportSemaphoreWin32HandleInfoKHR;
};
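// Usage sketch (illustrative comment only; the access mask and name below are assumptions):
// chain this struct behind vk::ExportSemaphoreCreateInfo when creating an exportable semaphore,
// e.g.
//   vk::ExportSemaphoreWin32HandleInfoKHR win32Info{ nullptr, GENERIC_ALL, L"Local\\MySemaphore" };
//   vk::ExportSemaphoreCreateInfo exportInfo{ vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32 };
//   exportInfo.setPNext( &win32Info );
//   vk::SemaphoreCreateInfo createInfo{};
//   createInfo.setPNext( &exportInfo );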
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
// wrapper struct for VkExtensionProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExtensionProperties.html
struct ExtensionProperties
{
using NativeType = VkExtensionProperties;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 ExtensionProperties(std::array<char,VK_MAX_EXTENSION_NAME_SIZE> const & extensionName_ = {}, uint32_t specVersion_ = {}) VULKAN_HPP_NOEXCEPT
: extensionName{ extensionName_ }, specVersion{ specVersion_ }
{}
VULKAN_HPP_CONSTEXPR_14 ExtensionProperties( ExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExtensionProperties( VkExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT
: ExtensionProperties( *reinterpret_cast<ExtensionProperties const *>( &rhs ) )
{}
ExtensionProperties & operator=( ExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ExtensionProperties & operator=( VkExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExtensionProperties const *>( &rhs );
return *this;
}
operator VkExtensionProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkExtensionProperties*>( this );
}
operator VkExtensionProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkExtensionProperties*>( this );
}
operator VkExtensionProperties const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkExtensionProperties*>( this );
}
operator VkExtensionProperties *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkExtensionProperties*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( extensionName, specVersion );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
std::strong_ordering operator<=>( ExtensionProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = strcmp( extensionName, rhs.extensionName ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
if ( auto cmp = specVersion <=> rhs.specVersion; cmp != 0 ) return cmp;
return std::strong_ordering::equivalent;
}
#endif
bool operator==( ExtensionProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( strcmp( extensionName, rhs.extensionName ) == 0 )
&& ( specVersion == rhs.specVersion );
}
bool operator!=( ExtensionProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> extensionName = {};
uint32_t specVersion = {};
};
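// Usage sketch (illustrative comment only): extensionName converts to a char array, so the same
// strcmp-based comparison used by the operators above also works in user code, e.g.
//   std::vector<vk::ExtensionProperties> available = vk::enumerateInstanceExtensionProperties();
//   bool hasSurface = std::any_of( available.begin(), available.end(),
//                                  []( vk::ExtensionProperties const & ep )
//                                  { return strcmp( ep.extensionName, VK_KHR_SURFACE_EXTENSION_NAME ) == 0; } );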
// wrapper struct for VkExternalMemoryProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalMemoryProperties.html
struct ExternalMemoryProperties
{
using NativeType = VkExternalMemoryProperties;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ExternalMemoryProperties(VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags externalMemoryFeatures_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags exportFromImportedHandleTypes_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags compatibleHandleTypes_ = {}) VULKAN_HPP_NOEXCEPT
: externalMemoryFeatures{ externalMemoryFeatures_ }, exportFromImportedHandleTypes{ exportFromImportedHandleTypes_ }, compatibleHandleTypes{ compatibleHandleTypes_ }
{}
VULKAN_HPP_CONSTEXPR ExternalMemoryProperties( ExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExternalMemoryProperties( VkExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
: ExternalMemoryProperties( *reinterpret_cast<ExternalMemoryProperties const *>( &rhs ) )
{}
ExternalMemoryProperties & operator=( ExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ExternalMemoryProperties & operator=( VkExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalMemoryProperties const *>( &rhs );
return *this;
}
operator VkExternalMemoryProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkExternalMemoryProperties*>( this );
}
operator VkExternalMemoryProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkExternalMemoryProperties*>( this );
}
operator VkExternalMemoryProperties const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkExternalMemoryProperties*>( this );
}
operator VkExternalMemoryProperties *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkExternalMemoryProperties*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( externalMemoryFeatures, exportFromImportedHandleTypes, compatibleHandleTypes );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ExternalMemoryProperties const & ) const = default;
#else
bool operator==( ExternalMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( externalMemoryFeatures == rhs.externalMemoryFeatures )
&& ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes )
&& ( compatibleHandleTypes == rhs.compatibleHandleTypes );
#endif
}
bool operator!=( ExternalMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags externalMemoryFeatures = {};
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags exportFromImportedHandleTypes = {};
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags compatibleHandleTypes = {};
};
using ExternalMemoryPropertiesKHR = ExternalMemoryProperties;
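// Usage sketch (illustrative comment only): this struct is returned nested inside query results
// such as vk::ExternalBufferProperties, typically to test feature bits, e.g.
//   // `props` is an assumed vk::ExternalMemoryProperties obtained from such a query
//   bool exportable = static_cast<bool>( props.externalMemoryFeatures &
//                                        vk::ExternalMemoryFeatureFlagBits::eExportable );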
// wrapper struct for VkExternalBufferProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalBufferProperties.html
struct ExternalBufferProperties
{
using NativeType = VkExternalBufferProperties;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalBufferProperties;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ExternalBufferProperties(VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, externalMemoryProperties{ externalMemoryProperties_ }
{}
VULKAN_HPP_CONSTEXPR ExternalBufferProperties( ExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExternalBufferProperties( VkExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT
: ExternalBufferProperties( *reinterpret_cast<ExternalBufferProperties const *>( &rhs ) )
{}
ExternalBufferProperties & operator=( ExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ExternalBufferProperties & operator=( VkExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalBufferProperties const *>( &rhs );
return *this;
}
operator VkExternalBufferProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkExternalBufferProperties*>( this );
}
operator VkExternalBufferProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkExternalBufferProperties*>( this );
}
operator VkExternalBufferProperties const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkExternalBufferProperties*>( this );
}
operator VkExternalBufferProperties *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkExternalBufferProperties*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryProperties const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, externalMemoryProperties );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ExternalBufferProperties const & ) const = default;
#else
bool operator==( ExternalBufferProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( externalMemoryProperties == rhs.externalMemoryProperties );
#endif
}
bool operator!=( ExternalBufferProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalBufferProperties;
void * pNext = {};
VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties = {};
};
template <>
struct CppType<StructureType, StructureType::eExternalBufferProperties>
{
using Type = ExternalBufferProperties;
};
using ExternalBufferPropertiesKHR = ExternalBufferProperties;
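// Usage sketch (illustrative comment only; assumes a valid vk::PhysicalDevice `physicalDevice`):
//   vk::PhysicalDeviceExternalBufferInfo info{ {}, vk::BufferUsageFlagBits::eTransferSrc,
//                                              vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd };
//   vk::ExternalBufferProperties props = physicalDevice.getExternalBufferProperties( info );
//   // props.externalMemoryProperties.compatibleHandleTypes now lists the usable handle types.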
// wrapper struct for VkExternalComputeQueueCreateInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalComputeQueueCreateInfoNV.html
struct ExternalComputeQueueCreateInfoNV
{
using NativeType = VkExternalComputeQueueCreateInfoNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalComputeQueueCreateInfoNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ExternalComputeQueueCreateInfoNV(VULKAN_HPP_NAMESPACE::Queue preferredQueue_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, preferredQueue{ preferredQueue_ }
{}
VULKAN_HPP_CONSTEXPR ExternalComputeQueueCreateInfoNV( ExternalComputeQueueCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExternalComputeQueueCreateInfoNV( VkExternalComputeQueueCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: ExternalComputeQueueCreateInfoNV( *reinterpret_cast<ExternalComputeQueueCreateInfoNV const *>( &rhs ) )
{}
ExternalComputeQueueCreateInfoNV & operator=( ExternalComputeQueueCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ExternalComputeQueueCreateInfoNV & operator=( VkExternalComputeQueueCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalComputeQueueCreateInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ExternalComputeQueueCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ExternalComputeQueueCreateInfoNV & setPreferredQueue( VULKAN_HPP_NAMESPACE::Queue preferredQueue_ ) VULKAN_HPP_NOEXCEPT
{
preferredQueue = preferredQueue_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkExternalComputeQueueCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkExternalComputeQueueCreateInfoNV*>( this );
}
operator VkExternalComputeQueueCreateInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkExternalComputeQueueCreateInfoNV*>( this );
}
operator VkExternalComputeQueueCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkExternalComputeQueueCreateInfoNV*>( this );
}
operator VkExternalComputeQueueCreateInfoNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkExternalComputeQueueCreateInfoNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Queue const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, preferredQueue );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ExternalComputeQueueCreateInfoNV const & ) const = default;
#else
bool operator==( ExternalComputeQueueCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( preferredQueue == rhs.preferredQueue );
#endif
}
bool operator!=( ExternalComputeQueueCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalComputeQueueCreateInfoNV;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Queue preferredQueue = {};
};
template <>
struct CppType<StructureType, StructureType::eExternalComputeQueueCreateInfoNV>
{
using Type = ExternalComputeQueueCreateInfoNV;
};
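// Usage sketch (illustrative comment only; the create call is assumed from the
// vkCreateExternalComputeQueueNV entry point of VK_NV_external_compute_queue, so consult the
// extension spec before relying on it):
//   vk::ExternalComputeQueueCreateInfoNV createInfo{};
//   createInfo.setPreferredQueue( computeQueue );  // `computeQueue` is an assumed vk::Queue
//   vk::ExternalComputeQueueNV externalQueue = device.createExternalComputeQueueNV( createInfo );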
// wrapper struct for VkExternalComputeQueueDataParamsNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalComputeQueueDataParamsNV.html
struct ExternalComputeQueueDataParamsNV
{
using NativeType = VkExternalComputeQueueDataParamsNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalComputeQueueDataParamsNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ExternalComputeQueueDataParamsNV(uint32_t deviceIndex_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, deviceIndex{ deviceIndex_ }
{}
VULKAN_HPP_CONSTEXPR ExternalComputeQueueDataParamsNV( ExternalComputeQueueDataParamsNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExternalComputeQueueDataParamsNV( VkExternalComputeQueueDataParamsNV const & rhs ) VULKAN_HPP_NOEXCEPT
: ExternalComputeQueueDataParamsNV( *reinterpret_cast<ExternalComputeQueueDataParamsNV const *>( &rhs ) )
{}
ExternalComputeQueueDataParamsNV & operator=( ExternalComputeQueueDataParamsNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ExternalComputeQueueDataParamsNV & operator=( VkExternalComputeQueueDataParamsNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalComputeQueueDataParamsNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ExternalComputeQueueDataParamsNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ExternalComputeQueueDataParamsNV & setDeviceIndex( uint32_t deviceIndex_ ) VULKAN_HPP_NOEXCEPT
{
deviceIndex = deviceIndex_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkExternalComputeQueueDataParamsNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkExternalComputeQueueDataParamsNV*>( this );
}
operator VkExternalComputeQueueDataParamsNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkExternalComputeQueueDataParamsNV*>( this );
}
operator VkExternalComputeQueueDataParamsNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkExternalComputeQueueDataParamsNV*>( this );
}
operator VkExternalComputeQueueDataParamsNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkExternalComputeQueueDataParamsNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, deviceIndex );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ExternalComputeQueueDataParamsNV const & ) const = default;
#else
bool operator==( ExternalComputeQueueDataParamsNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( deviceIndex == rhs.deviceIndex );
#endif
}
bool operator!=( ExternalComputeQueueDataParamsNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalComputeQueueDataParamsNV;
const void * pNext = {};
uint32_t deviceIndex = {};
};
template <>
struct CppType<StructureType, StructureType::eExternalComputeQueueDataParamsNV>
{
using Type = ExternalComputeQueueDataParamsNV;
};
// wrapper struct for VkExternalComputeQueueDeviceCreateInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalComputeQueueDeviceCreateInfoNV.html
struct ExternalComputeQueueDeviceCreateInfoNV
{
using NativeType = VkExternalComputeQueueDeviceCreateInfoNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalComputeQueueDeviceCreateInfoNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ExternalComputeQueueDeviceCreateInfoNV(uint32_t reservedExternalQueues_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, reservedExternalQueues{ reservedExternalQueues_ }
{}
VULKAN_HPP_CONSTEXPR ExternalComputeQueueDeviceCreateInfoNV( ExternalComputeQueueDeviceCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExternalComputeQueueDeviceCreateInfoNV( VkExternalComputeQueueDeviceCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: ExternalComputeQueueDeviceCreateInfoNV( *reinterpret_cast<ExternalComputeQueueDeviceCreateInfoNV const *>( &rhs ) )
{}
ExternalComputeQueueDeviceCreateInfoNV & operator=( ExternalComputeQueueDeviceCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ExternalComputeQueueDeviceCreateInfoNV & operator=( VkExternalComputeQueueDeviceCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalComputeQueueDeviceCreateInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ExternalComputeQueueDeviceCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ExternalComputeQueueDeviceCreateInfoNV & setReservedExternalQueues( uint32_t reservedExternalQueues_ ) VULKAN_HPP_NOEXCEPT
{
reservedExternalQueues = reservedExternalQueues_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkExternalComputeQueueDeviceCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkExternalComputeQueueDeviceCreateInfoNV*>( this );
}
operator VkExternalComputeQueueDeviceCreateInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkExternalComputeQueueDeviceCreateInfoNV*>( this );
}
operator VkExternalComputeQueueDeviceCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkExternalComputeQueueDeviceCreateInfoNV*>( this );
}
operator VkExternalComputeQueueDeviceCreateInfoNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkExternalComputeQueueDeviceCreateInfoNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, reservedExternalQueues );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ExternalComputeQueueDeviceCreateInfoNV const & ) const = default;
#else
bool operator==( ExternalComputeQueueDeviceCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( reservedExternalQueues == rhs.reservedExternalQueues );
#endif
}
bool operator!=( ExternalComputeQueueDeviceCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalComputeQueueDeviceCreateInfoNV;
const void * pNext = {};
uint32_t reservedExternalQueues = {};
};
template <>
struct CppType<StructureType, StructureType::eExternalComputeQueueDeviceCreateInfoNV>
{
using Type = ExternalComputeQueueDeviceCreateInfoNV;
};
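// Usage sketch (illustrative comment only): per the VK_NV_external_compute_queue spec this
// struct extends vk::DeviceCreateInfo, reserving external queues at device creation, e.g.
//   vk::ExternalComputeQueueDeviceCreateInfoNV reserveInfo{ 1 /* reservedExternalQueues */ };
//   deviceCreateInfo.setPNext( &reserveInfo );  // `deviceCreateInfo` is an assumed vk::DeviceCreateInfo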
// wrapper struct for VkExternalFenceProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalFenceProperties.html
struct ExternalFenceProperties
{
using NativeType = VkExternalFenceProperties;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalFenceProperties;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ExternalFenceProperties(VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags exportFromImportedHandleTypes_ = {}, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags compatibleHandleTypes_ = {}, VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags externalFenceFeatures_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, exportFromImportedHandleTypes{ exportFromImportedHandleTypes_ }, compatibleHandleTypes{ compatibleHandleTypes_ }, externalFenceFeatures{ externalFenceFeatures_ }
{}
VULKAN_HPP_CONSTEXPR ExternalFenceProperties( ExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExternalFenceProperties( VkExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
: ExternalFenceProperties( *reinterpret_cast<ExternalFenceProperties const *>( &rhs ) )
{}
ExternalFenceProperties & operator=( ExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ExternalFenceProperties & operator=( VkExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalFenceProperties const *>( &rhs );
return *this;
}
operator VkExternalFenceProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkExternalFenceProperties*>( this );
}
operator VkExternalFenceProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkExternalFenceProperties*>( this );
}
operator VkExternalFenceProperties const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkExternalFenceProperties*>( this );
}
operator VkExternalFenceProperties *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkExternalFenceProperties*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags const &, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags const &, VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, exportFromImportedHandleTypes, compatibleHandleTypes, externalFenceFeatures );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ExternalFenceProperties const & ) const = default;
#else
bool operator==( ExternalFenceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes )
&& ( compatibleHandleTypes == rhs.compatibleHandleTypes )
&& ( externalFenceFeatures == rhs.externalFenceFeatures );
#endif
}
bool operator!=( ExternalFenceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalFenceProperties;
void * pNext = {};
VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags exportFromImportedHandleTypes = {};
VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags compatibleHandleTypes = {};
VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags externalFenceFeatures = {};
};
template <>
struct CppType<StructureType, StructureType::eExternalFenceProperties>
{
using Type = ExternalFenceProperties;
};
using ExternalFencePropertiesKHR = ExternalFenceProperties;
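// Usage sketch (illustrative comment only; assumes a valid vk::PhysicalDevice `physicalDevice`):
//   vk::PhysicalDeviceExternalFenceInfo info{ vk::ExternalFenceHandleTypeFlagBits::eOpaqueFd };
//   vk::ExternalFenceProperties props = physicalDevice.getExternalFenceProperties( info );
//   bool canExport = static_cast<bool>( props.externalFenceFeatures &
//                                       vk::ExternalFenceFeatureFlagBits::eExportable );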
#if defined( VK_USE_PLATFORM_ANDROID_KHR )
// wrapper struct for VkExternalFormatANDROID, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalFormatANDROID.html
struct ExternalFormatANDROID
{
using NativeType = VkExternalFormatANDROID;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalFormatANDROID;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ExternalFormatANDROID(uint64_t externalFormat_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, externalFormat{ externalFormat_ }
{}
VULKAN_HPP_CONSTEXPR ExternalFormatANDROID( ExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExternalFormatANDROID( VkExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
: ExternalFormatANDROID( *reinterpret_cast<ExternalFormatANDROID const *>( &rhs ) )
{}
ExternalFormatANDROID & operator=( ExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ExternalFormatANDROID & operator=( VkExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalFormatANDROID const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ExternalFormatANDROID & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ExternalFormatANDROID & setExternalFormat( uint64_t externalFormat_ ) VULKAN_HPP_NOEXCEPT
{
externalFormat = externalFormat_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkExternalFormatANDROID const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkExternalFormatANDROID*>( this );
}
operator VkExternalFormatANDROID &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkExternalFormatANDROID*>( this );
}
operator VkExternalFormatANDROID const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkExternalFormatANDROID*>( this );
}
operator VkExternalFormatANDROID *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkExternalFormatANDROID*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint64_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, externalFormat );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ExternalFormatANDROID const & ) const = default;
#else
bool operator==( ExternalFormatANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( externalFormat == rhs.externalFormat );
#endif
}
bool operator!=( ExternalFormatANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalFormatANDROID;
void * pNext = {};
uint64_t externalFormat = {};
};
template <>
struct CppType<StructureType, StructureType::eExternalFormatANDROID>
{
using Type = ExternalFormatANDROID;
};
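// Usage sketch (illustrative comment only): a non-zero externalFormat obtained from
// vk::AndroidHardwareBufferFormatPropertiesANDROID is chained into vk::ImageCreateInfo (and the
// matching vk::SamplerYcbcrConversionCreateInfo) when importing an AHardwareBuffer, e.g.
//   vk::ExternalFormatANDROID externalFormat{ formatProps.externalFormat };  // `formatProps` is an assumed query result
//   imageCreateInfo.setPNext( &externalFormat );  // `imageCreateInfo` is an assumed vk::ImageCreateInfo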
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
#if defined( VK_USE_PLATFORM_SCREEN_QNX )
// wrapper struct for VkExternalFormatQNX, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalFormatQNX.html
struct ExternalFormatQNX
{
using NativeType = VkExternalFormatQNX;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalFormatQNX;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ExternalFormatQNX(uint64_t externalFormat_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, externalFormat{ externalFormat_ }
{}
VULKAN_HPP_CONSTEXPR ExternalFormatQNX( ExternalFormatQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExternalFormatQNX( VkExternalFormatQNX const & rhs ) VULKAN_HPP_NOEXCEPT
: ExternalFormatQNX( *reinterpret_cast<ExternalFormatQNX const *>( &rhs ) )
{}
ExternalFormatQNX & operator=( ExternalFormatQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ExternalFormatQNX & operator=( VkExternalFormatQNX const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalFormatQNX const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ExternalFormatQNX & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ExternalFormatQNX & setExternalFormat( uint64_t externalFormat_ ) VULKAN_HPP_NOEXCEPT
{
externalFormat = externalFormat_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkExternalFormatQNX const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkExternalFormatQNX*>( this );
}
operator VkExternalFormatQNX &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkExternalFormatQNX*>( this );
}
operator VkExternalFormatQNX const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkExternalFormatQNX*>( this );
}
operator VkExternalFormatQNX *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkExternalFormatQNX*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint64_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, externalFormat );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ExternalFormatQNX const & ) const = default;
#else
bool operator==( ExternalFormatQNX const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( externalFormat == rhs.externalFormat );
#endif
}
bool operator!=( ExternalFormatQNX const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalFormatQNX;
void * pNext = {};
uint64_t externalFormat = {};
};
template <>
struct CppType<StructureType, StructureType::eExternalFormatQNX>
{
using Type = ExternalFormatQNX;
};
#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
// wrapper struct for VkExternalImageFormatProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalImageFormatProperties.html
struct ExternalImageFormatProperties
{
using NativeType = VkExternalImageFormatProperties;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalImageFormatProperties;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ExternalImageFormatProperties(VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, externalMemoryProperties{ externalMemoryProperties_ }
{}
VULKAN_HPP_CONSTEXPR ExternalImageFormatProperties( ExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExternalImageFormatProperties( VkExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
: ExternalImageFormatProperties( *reinterpret_cast<ExternalImageFormatProperties const *>( &rhs ) )
{}
ExternalImageFormatProperties & operator=( ExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ExternalImageFormatProperties & operator=( VkExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalImageFormatProperties const *>( &rhs );
return *this;
}
operator VkExternalImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkExternalImageFormatProperties*>( this );
}
operator VkExternalImageFormatProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkExternalImageFormatProperties*>( this );
}
operator VkExternalImageFormatProperties const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkExternalImageFormatProperties*>( this );
}
operator VkExternalImageFormatProperties *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkExternalImageFormatProperties*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryProperties const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, externalMemoryProperties );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ExternalImageFormatProperties const & ) const = default;
#else
bool operator==( ExternalImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( externalMemoryProperties == rhs.externalMemoryProperties );
#endif
}
bool operator!=( ExternalImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalImageFormatProperties;
void * pNext = {};
VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties = {};
};
template <>
struct CppType<StructureType, StructureType::eExternalImageFormatProperties>
{
using Type = ExternalImageFormatProperties;
};
using ExternalImageFormatPropertiesKHR = ExternalImageFormatProperties;
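// Usage sketch (illustrative comment only): this struct is returned via the pNext chain of
// vk::ImageFormatProperties2 when vk::PhysicalDeviceExternalImageFormatInfo is chained into the
// query. A structure-chain form, assuming a valid `physicalDevice` and a `formatInfo`
// (vk::PhysicalDeviceImageFormatInfo2 with the external-image-format info already chained):
//   auto chain = physicalDevice.getImageFormatProperties2<
//     vk::ImageFormatProperties2, vk::ExternalImageFormatProperties>( formatInfo );
//   auto extProps = chain.get<vk::ExternalImageFormatProperties>();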
// wrapper struct for VkImageFormatProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageFormatProperties.html
struct ImageFormatProperties
{
using NativeType = VkImageFormatProperties;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImageFormatProperties(VULKAN_HPP_NAMESPACE::Extent3D maxExtent_ = {}, uint32_t maxMipLevels_ = {}, uint32_t maxArrayLayers_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize maxResourceSize_ = {}) VULKAN_HPP_NOEXCEPT
: maxExtent{ maxExtent_ }, maxMipLevels{ maxMipLevels_ }, maxArrayLayers{ maxArrayLayers_ }, sampleCounts{ sampleCounts_ }, maxResourceSize{ maxResourceSize_ }
{}
VULKAN_HPP_CONSTEXPR ImageFormatProperties( ImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageFormatProperties( VkImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageFormatProperties( *reinterpret_cast<ImageFormatProperties const *>( &rhs ) )
{}
ImageFormatProperties & operator=( ImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ImageFormatProperties & operator=( VkImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageFormatProperties const *>( &rhs );
return *this;
}
operator VkImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageFormatProperties*>( this );
}
operator VkImageFormatProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageFormatProperties*>( this );
}
operator VkImageFormatProperties const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkImageFormatProperties*>( this );
}
operator VkImageFormatProperties *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkImageFormatProperties*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::Extent3D const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::SampleCountFlags const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( maxExtent, maxMipLevels, maxArrayLayers, sampleCounts, maxResourceSize );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImageFormatProperties const & ) const = default;
#else
bool operator==( ImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( maxExtent == rhs.maxExtent )
&& ( maxMipLevels == rhs.maxMipLevels )
&& ( maxArrayLayers == rhs.maxArrayLayers )
&& ( sampleCounts == rhs.sampleCounts )
&& ( maxResourceSize == rhs.maxResourceSize );
#endif
}
bool operator!=( ImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::Extent3D maxExtent = {};
uint32_t maxMipLevels = {};
uint32_t maxArrayLayers = {};
VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts = {};
VULKAN_HPP_NAMESPACE::DeviceSize maxResourceSize = {};
};
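// Usage sketch (illustrative comment only; assumes a valid vk::PhysicalDevice `physicalDevice`):
//   vk::ImageFormatProperties props = physicalDevice.getImageFormatProperties(
//     vk::Format::eR8G8B8A8Unorm, vk::ImageType::e2D, vk::ImageTiling::eOptimal,
//     vk::ImageUsageFlagBits::eSampled, {} );
//   // props.maxExtent / props.maxMipLevels bound what image creation will accept.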
// wrapper struct for VkExternalImageFormatPropertiesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalImageFormatPropertiesNV.html
struct ExternalImageFormatPropertiesNV
{
using NativeType = VkExternalImageFormatPropertiesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ExternalImageFormatPropertiesNV(VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsNV externalMemoryFeatures_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes_ = {}) VULKAN_HPP_NOEXCEPT
: imageFormatProperties{ imageFormatProperties_ }, externalMemoryFeatures{ externalMemoryFeatures_ }, exportFromImportedHandleTypes{ exportFromImportedHandleTypes_ }, compatibleHandleTypes{ compatibleHandleTypes_ }
{}
VULKAN_HPP_CONSTEXPR ExternalImageFormatPropertiesNV( ExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExternalImageFormatPropertiesNV( VkExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: ExternalImageFormatPropertiesNV( *reinterpret_cast<ExternalImageFormatPropertiesNV const *>( &rhs ) )
{}
ExternalImageFormatPropertiesNV & operator=( ExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ExternalImageFormatPropertiesNV & operator=( VkExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV const *>( &rhs );
return *this;
}
operator VkExternalImageFormatPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkExternalImageFormatPropertiesNV*>( this );
}
operator VkExternalImageFormatPropertiesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkExternalImageFormatPropertiesNV*>( this );
}
operator VkExternalImageFormatPropertiesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkExternalImageFormatPropertiesNV*>( this );
}
operator VkExternalImageFormatPropertiesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkExternalImageFormatPropertiesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::ImageFormatProperties const &, VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsNV const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( imageFormatProperties, externalMemoryFeatures, exportFromImportedHandleTypes, compatibleHandleTypes );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ExternalImageFormatPropertiesNV const & ) const = default;
#else
bool operator==( ExternalImageFormatPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( imageFormatProperties == rhs.imageFormatProperties )
&& ( externalMemoryFeatures == rhs.externalMemoryFeatures )
&& ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes )
&& ( compatibleHandleTypes == rhs.compatibleHandleTypes );
#endif
}
bool operator!=( ExternalImageFormatPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties = {};
VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsNV externalMemoryFeatures = {};
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes = {};
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes = {};
};
// wrapper struct for VkExternalMemoryAcquireUnmodifiedEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalMemoryAcquireUnmodifiedEXT.html
struct ExternalMemoryAcquireUnmodifiedEXT
{
using NativeType = VkExternalMemoryAcquireUnmodifiedEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryAcquireUnmodifiedEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ExternalMemoryAcquireUnmodifiedEXT(VULKAN_HPP_NAMESPACE::Bool32 acquireUnmodifiedMemory_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, acquireUnmodifiedMemory{ acquireUnmodifiedMemory_ }
{}
VULKAN_HPP_CONSTEXPR ExternalMemoryAcquireUnmodifiedEXT( ExternalMemoryAcquireUnmodifiedEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExternalMemoryAcquireUnmodifiedEXT( VkExternalMemoryAcquireUnmodifiedEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: ExternalMemoryAcquireUnmodifiedEXT( *reinterpret_cast<ExternalMemoryAcquireUnmodifiedEXT const *>( &rhs ) )
{}
ExternalMemoryAcquireUnmodifiedEXT & operator=( ExternalMemoryAcquireUnmodifiedEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ExternalMemoryAcquireUnmodifiedEXT & operator=( VkExternalMemoryAcquireUnmodifiedEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalMemoryAcquireUnmodifiedEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ExternalMemoryAcquireUnmodifiedEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ExternalMemoryAcquireUnmodifiedEXT & setAcquireUnmodifiedMemory( VULKAN_HPP_NAMESPACE::Bool32 acquireUnmodifiedMemory_ ) VULKAN_HPP_NOEXCEPT
{
acquireUnmodifiedMemory = acquireUnmodifiedMemory_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkExternalMemoryAcquireUnmodifiedEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkExternalMemoryAcquireUnmodifiedEXT*>( this );
}
operator VkExternalMemoryAcquireUnmodifiedEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkExternalMemoryAcquireUnmodifiedEXT*>( this );
}
operator VkExternalMemoryAcquireUnmodifiedEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkExternalMemoryAcquireUnmodifiedEXT*>( this );
}
operator VkExternalMemoryAcquireUnmodifiedEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkExternalMemoryAcquireUnmodifiedEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, acquireUnmodifiedMemory );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ExternalMemoryAcquireUnmodifiedEXT const & ) const = default;
#else
bool operator==( ExternalMemoryAcquireUnmodifiedEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( acquireUnmodifiedMemory == rhs.acquireUnmodifiedMemory );
#endif
}
bool operator!=( ExternalMemoryAcquireUnmodifiedEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryAcquireUnmodifiedEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 acquireUnmodifiedMemory = {};
};
template <>
struct CppType<StructureType, StructureType::eExternalMemoryAcquireUnmodifiedEXT>
{
using Type = ExternalMemoryAcquireUnmodifiedEXT;
};
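// Usage sketch (illustrative comment only): per VK_EXT_external_memory_acquire_unmodified this
// struct extends the memory-barrier structs; chaining it with acquireUnmodifiedMemory = VK_TRUE
// tells the driver the external payload was not modified since the last release, e.g.
//   vk::ExternalMemoryAcquireUnmodifiedEXT unmodified{ VK_TRUE };
//   imageBarrier.setPNext( &unmodified );  // `imageBarrier` is an assumed vk::ImageMemoryBarrier2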
// wrapper struct for VkExternalMemoryBufferCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalMemoryBufferCreateInfo.html
struct ExternalMemoryBufferCreateInfo
{
using NativeType = VkExternalMemoryBufferCreateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryBufferCreateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ExternalMemoryBufferCreateInfo(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, handleTypes{ handleTypes_ }
{}
VULKAN_HPP_CONSTEXPR ExternalMemoryBufferCreateInfo( ExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExternalMemoryBufferCreateInfo( VkExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: ExternalMemoryBufferCreateInfo( *reinterpret_cast<ExternalMemoryBufferCreateInfo const *>( &rhs ) )
{}
ExternalMemoryBufferCreateInfo & operator=( ExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ExternalMemoryBufferCreateInfo & operator=( VkExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ExternalMemoryBufferCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ExternalMemoryBufferCreateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
{
handleTypes = handleTypes_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkExternalMemoryBufferCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkExternalMemoryBufferCreateInfo*>( this );
}
operator VkExternalMemoryBufferCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkExternalMemoryBufferCreateInfo*>( this );
}
operator VkExternalMemoryBufferCreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkExternalMemoryBufferCreateInfo*>( this );
}
operator VkExternalMemoryBufferCreateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkExternalMemoryBufferCreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, handleTypes );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ExternalMemoryBufferCreateInfo const & ) const = default;
#else
bool operator==( ExternalMemoryBufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( handleTypes == rhs.handleTypes );
#endif
}
bool operator!=( ExternalMemoryBufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryBufferCreateInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {};
};
template <>
struct CppType<StructureType, StructureType::eExternalMemoryBufferCreateInfo>
{
using Type = ExternalMemoryBufferCreateInfo;
};
using ExternalMemoryBufferCreateInfoKHR = ExternalMemoryBufferCreateInfo;
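// Usage sketch (illustrative comment only): chain into vk::BufferCreateInfo when the buffer's
// memory will be exported or imported, e.g.
//   vk::ExternalMemoryBufferCreateInfo extMem{ vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd };
//   bufferCreateInfo.setPNext( &extMem );  // `bufferCreateInfo` is an assumed vk::BufferCreateInfo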
// wrapper struct for VkExternalMemoryImageCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalMemoryImageCreateInfo.html
struct ExternalMemoryImageCreateInfo
{
using NativeType = VkExternalMemoryImageCreateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryImageCreateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfo(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, handleTypes{ handleTypes_ }
{}
VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfo( ExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExternalMemoryImageCreateInfo( VkExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: ExternalMemoryImageCreateInfo( *reinterpret_cast<ExternalMemoryImageCreateInfo const *>( &rhs ) )
{}
ExternalMemoryImageCreateInfo & operator=( ExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ExternalMemoryImageCreateInfo & operator=( VkExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ExternalMemoryImageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ExternalMemoryImageCreateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
{
handleTypes = handleTypes_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkExternalMemoryImageCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkExternalMemoryImageCreateInfo*>( this );
}
operator VkExternalMemoryImageCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkExternalMemoryImageCreateInfo*>( this );
}
operator VkExternalMemoryImageCreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkExternalMemoryImageCreateInfo*>( this );
}
operator VkExternalMemoryImageCreateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkExternalMemoryImageCreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, handleTypes );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ExternalMemoryImageCreateInfo const & ) const = default;
#else
bool operator==( ExternalMemoryImageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( handleTypes == rhs.handleTypes );
#endif
}
bool operator!=( ExternalMemoryImageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryImageCreateInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {};
};
template <>
struct CppType<StructureType, StructureType::eExternalMemoryImageCreateInfo>
{
using Type = ExternalMemoryImageCreateInfo;
};
using ExternalMemoryImageCreateInfoKHR = ExternalMemoryImageCreateInfo;
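  // The image-side counterpart follows the same chaining pattern; a sketch under the same
  // assumptions as the buffer example above:
  //
  //   auto externalInfo = vk::ExternalMemoryImageCreateInfo{}.setHandleTypes( vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );
  //   vk::ImageCreateInfo imageInfo{ {}, vk::ImageType::e2D, vk::Format::eR8G8B8A8Unorm,
  //                                  vk::Extent3D{ 256, 256, 1 }, 1, 1, vk::SampleCountFlagBits::e1,
  //                                  vk::ImageTiling::eOptimal, vk::ImageUsageFlagBits::eSampled };
  //   imageInfo.setPNext( &externalInfo );
  //   vk::Image image = device.createImage( imageInfo );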
// wrapper struct for struct VkExternalMemoryImageCreateInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalMemoryImageCreateInfoNV.html
struct ExternalMemoryImageCreateInfoNV
{
using NativeType = VkExternalMemoryImageCreateInfoNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryImageCreateInfoNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfoNV(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, handleTypes{ handleTypes_ }
{}
VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfoNV( ExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExternalMemoryImageCreateInfoNV( VkExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: ExternalMemoryImageCreateInfoNV( *reinterpret_cast<ExternalMemoryImageCreateInfoNV const *>( &rhs ) )
{}
ExternalMemoryImageCreateInfoNV & operator=( ExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ExternalMemoryImageCreateInfoNV & operator=( VkExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ExternalMemoryImageCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ExternalMemoryImageCreateInfoNV & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ ) VULKAN_HPP_NOEXCEPT
{
handleTypes = handleTypes_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkExternalMemoryImageCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkExternalMemoryImageCreateInfoNV*>( this );
}
operator VkExternalMemoryImageCreateInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkExternalMemoryImageCreateInfoNV*>( this );
}
operator VkExternalMemoryImageCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkExternalMemoryImageCreateInfoNV*>( this );
}
operator VkExternalMemoryImageCreateInfoNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkExternalMemoryImageCreateInfoNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, handleTypes );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ExternalMemoryImageCreateInfoNV const & ) const = default;
#else
bool operator==( ExternalMemoryImageCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( handleTypes == rhs.handleTypes );
#endif
}
bool operator!=( ExternalMemoryImageCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryImageCreateInfoNV;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes = {};
};
template <>
struct CppType<StructureType, StructureType::eExternalMemoryImageCreateInfoNV>
{
using Type = ExternalMemoryImageCreateInfoNV;
};
// wrapper struct for struct VkExternalMemoryTensorCreateInfoARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalMemoryTensorCreateInfoARM.html
struct ExternalMemoryTensorCreateInfoARM
{
using NativeType = VkExternalMemoryTensorCreateInfoARM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryTensorCreateInfoARM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ExternalMemoryTensorCreateInfoARM(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, handleTypes{ handleTypes_ }
{}
VULKAN_HPP_CONSTEXPR ExternalMemoryTensorCreateInfoARM( ExternalMemoryTensorCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExternalMemoryTensorCreateInfoARM( VkExternalMemoryTensorCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
: ExternalMemoryTensorCreateInfoARM( *reinterpret_cast<ExternalMemoryTensorCreateInfoARM const *>( &rhs ) )
{}
ExternalMemoryTensorCreateInfoARM & operator=( ExternalMemoryTensorCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ExternalMemoryTensorCreateInfoARM & operator=( VkExternalMemoryTensorCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalMemoryTensorCreateInfoARM const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ExternalMemoryTensorCreateInfoARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ExternalMemoryTensorCreateInfoARM & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
{
handleTypes = handleTypes_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkExternalMemoryTensorCreateInfoARM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkExternalMemoryTensorCreateInfoARM*>( this );
}
operator VkExternalMemoryTensorCreateInfoARM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkExternalMemoryTensorCreateInfoARM*>( this );
}
operator VkExternalMemoryTensorCreateInfoARM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkExternalMemoryTensorCreateInfoARM*>( this );
}
operator VkExternalMemoryTensorCreateInfoARM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkExternalMemoryTensorCreateInfoARM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, handleTypes );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ExternalMemoryTensorCreateInfoARM const & ) const = default;
#else
bool operator==( ExternalMemoryTensorCreateInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( handleTypes == rhs.handleTypes );
#endif
}
bool operator!=( ExternalMemoryTensorCreateInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryTensorCreateInfoARM;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {};
};
template <>
struct CppType<StructureType, StructureType::eExternalMemoryTensorCreateInfoARM>
{
using Type = ExternalMemoryTensorCreateInfoARM;
};
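  // The ARM tensor variant reuses the core ExternalMemoryHandleTypeFlags; a hedged sketch
  // (VK_ARM_tensors assumed enabled), chained into the tensor create-info's pNext exactly as
  // the buffer and image variants above chain into theirs:
  //
  //   vk::ExternalMemoryTensorCreateInfoARM externalInfo{ vk::ExternalMemoryHandleTypeFlagBits::eDmaBufEXT };
  //   // ... then set externalInfo as the pNext of the TensorCreateInfoARM used at tensor creation.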
// wrapper struct for struct VkExternalSemaphoreProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalSemaphoreProperties.html
struct ExternalSemaphoreProperties
{
using NativeType = VkExternalSemaphoreProperties;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalSemaphoreProperties;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ExternalSemaphoreProperties(VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes_ = {}, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags compatibleHandleTypes_ = {}, VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags externalSemaphoreFeatures_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, exportFromImportedHandleTypes{ exportFromImportedHandleTypes_ }, compatibleHandleTypes{ compatibleHandleTypes_ }, externalSemaphoreFeatures{ externalSemaphoreFeatures_ }
{}
VULKAN_HPP_CONSTEXPR ExternalSemaphoreProperties( ExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExternalSemaphoreProperties( VkExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
: ExternalSemaphoreProperties( *reinterpret_cast<ExternalSemaphoreProperties const *>( &rhs ) )
{}
ExternalSemaphoreProperties & operator=( ExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ExternalSemaphoreProperties & operator=( VkExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties const *>( &rhs );
return *this;
}
operator VkExternalSemaphoreProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkExternalSemaphoreProperties*>( this );
}
operator VkExternalSemaphoreProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkExternalSemaphoreProperties*>( this );
}
operator VkExternalSemaphoreProperties const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkExternalSemaphoreProperties*>( this );
}
operator VkExternalSemaphoreProperties *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkExternalSemaphoreProperties*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags const &, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags const &, VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, exportFromImportedHandleTypes, compatibleHandleTypes, externalSemaphoreFeatures );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ExternalSemaphoreProperties const & ) const = default;
#else
bool operator==( ExternalSemaphoreProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes )
&& ( compatibleHandleTypes == rhs.compatibleHandleTypes )
&& ( externalSemaphoreFeatures == rhs.externalSemaphoreFeatures );
#endif
}
bool operator!=( ExternalSemaphoreProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalSemaphoreProperties;
void * pNext = {};
VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes = {};
VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags compatibleHandleTypes = {};
VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags externalSemaphoreFeatures = {};
};
template <>
struct CppType<StructureType, StructureType::eExternalSemaphoreProperties>
{
using Type = ExternalSemaphoreProperties;
};
using ExternalSemaphorePropertiesKHR = ExternalSemaphoreProperties;
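  // ExternalSemaphoreProperties is an output structure, filled by a capability query rather
  // than passed to a create call. A sketch assuming a valid vk::PhysicalDevice
  // `physicalDevice` (Vulkan 1.1, or VK_KHR_external_semaphore_capabilities):
  //
  //   vk::PhysicalDeviceExternalSemaphoreInfo info{ vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd };
  //   vk::ExternalSemaphoreProperties props = physicalDevice.getExternalSemaphoreProperties( info );
  //   bool exportable = static_cast<bool>( props.externalSemaphoreFeatures &
  //                                        vk::ExternalSemaphoreFeatureFlagBits::eExportable );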
// wrapper struct for struct VkExternalTensorPropertiesARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalTensorPropertiesARM.html
struct ExternalTensorPropertiesARM
{
using NativeType = VkExternalTensorPropertiesARM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalTensorPropertiesARM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ExternalTensorPropertiesARM(VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, externalMemoryProperties{ externalMemoryProperties_ }
{}
VULKAN_HPP_CONSTEXPR ExternalTensorPropertiesARM( ExternalTensorPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExternalTensorPropertiesARM( VkExternalTensorPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT
: ExternalTensorPropertiesARM( *reinterpret_cast<ExternalTensorPropertiesARM const *>( &rhs ) )
{}
ExternalTensorPropertiesARM & operator=( ExternalTensorPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ExternalTensorPropertiesARM & operator=( VkExternalTensorPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalTensorPropertiesARM const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ExternalTensorPropertiesARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ExternalTensorPropertiesARM & setExternalMemoryProperties( VULKAN_HPP_NAMESPACE::ExternalMemoryProperties const & externalMemoryProperties_ ) VULKAN_HPP_NOEXCEPT
{
externalMemoryProperties = externalMemoryProperties_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkExternalTensorPropertiesARM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkExternalTensorPropertiesARM*>( this );
}
operator VkExternalTensorPropertiesARM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkExternalTensorPropertiesARM*>( this );
}
operator VkExternalTensorPropertiesARM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkExternalTensorPropertiesARM*>( this );
}
operator VkExternalTensorPropertiesARM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkExternalTensorPropertiesARM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryProperties const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, externalMemoryProperties );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ExternalTensorPropertiesARM const & ) const = default;
#else
bool operator==( ExternalTensorPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( externalMemoryProperties == rhs.externalMemoryProperties );
#endif
}
bool operator!=( ExternalTensorPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalTensorPropertiesARM;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties = {};
};
template <>
struct CppType<StructureType, StructureType::eExternalTensorPropertiesARM>
{
using Type = ExternalTensorPropertiesARM;
};
// wrapper struct for struct VkFenceCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkFenceCreateInfo.html
struct FenceCreateInfo
{
using NativeType = VkFenceCreateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFenceCreateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR FenceCreateInfo(VULKAN_HPP_NAMESPACE::FenceCreateFlags flags_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }
{}
VULKAN_HPP_CONSTEXPR FenceCreateInfo( FenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
FenceCreateInfo( VkFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: FenceCreateInfo( *reinterpret_cast<FenceCreateInfo const *>( &rhs ) )
{}
FenceCreateInfo & operator=( FenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
FenceCreateInfo & operator=( VkFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FenceCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 FenceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 FenceCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::FenceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkFenceCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkFenceCreateInfo*>( this );
}
operator VkFenceCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkFenceCreateInfo*>( this );
}
operator VkFenceCreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkFenceCreateInfo*>( this );
}
operator VkFenceCreateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkFenceCreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::FenceCreateFlags const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( FenceCreateInfo const & ) const = default;
#else
bool operator==( FenceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags );
#endif
}
bool operator!=( FenceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceCreateInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::FenceCreateFlags flags = {};
};
template <>
struct CppType<StructureType, StructureType::eFenceCreateInfo>
{
using Type = FenceCreateInfo;
};
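  // A common FenceCreateInfo idiom: create the fence already signaled so the first frame's
  // wait does not block. Sketch assuming the `vk` alias and a valid vk::Device `device`:
  //
  //   vk::Fence fence = device.createFence( vk::FenceCreateInfo{ vk::FenceCreateFlagBits::eSignaled } );
  //   device.resetFences( fence );   // the ArrayProxy overload accepts a single fence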
// wrapper struct for struct VkFenceGetFdInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkFenceGetFdInfoKHR.html
struct FenceGetFdInfoKHR
{
using NativeType = VkFenceGetFdInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFenceGetFdInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR FenceGetFdInfoKHR(VULKAN_HPP_NAMESPACE::Fence fence_ = {}, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, fence{ fence_ }, handleType{ handleType_ }
{}
VULKAN_HPP_CONSTEXPR FenceGetFdInfoKHR( FenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
FenceGetFdInfoKHR( VkFenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: FenceGetFdInfoKHR( *reinterpret_cast<FenceGetFdInfoKHR const *>( &rhs ) )
{}
FenceGetFdInfoKHR & operator=( FenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
FenceGetFdInfoKHR & operator=( VkFenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 FenceGetFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 FenceGetFdInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
{
fence = fence_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 FenceGetFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkFenceGetFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkFenceGetFdInfoKHR*>( this );
}
operator VkFenceGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkFenceGetFdInfoKHR*>( this );
}
operator VkFenceGetFdInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkFenceGetFdInfoKHR*>( this );
}
operator VkFenceGetFdInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkFenceGetFdInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Fence const &, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, fence, handleType );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( FenceGetFdInfoKHR const & ) const = default;
#else
bool operator==( FenceGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( fence == rhs.fence )
&& ( handleType == rhs.handleType );
#endif
}
bool operator!=( FenceGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceGetFdInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Fence fence = {};
VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
};
template <>
struct CppType<StructureType, StructureType::eFenceGetFdInfoKHR>
{
using Type = FenceGetFdInfoKHR;
};
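  // FenceGetFdInfoKHR drives vkGetFenceFdKHR; a sketch assuming VK_KHR_external_fence_fd is
  // enabled and `fence` was created exportable with the matching handle type:
  //
  //   vk::FenceGetFdInfoKHR getFdInfo{ fence, vk::ExternalFenceHandleTypeFlagBits::eSyncFd };
  //   int fd = device.getFenceFdKHR( getFdInfo );   // ownership of the fd passes to the caller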
#if defined( VK_USE_PLATFORM_WIN32_KHR )
// wrapper struct for struct VkFenceGetWin32HandleInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkFenceGetWin32HandleInfoKHR.html
struct FenceGetWin32HandleInfoKHR
{
using NativeType = VkFenceGetWin32HandleInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFenceGetWin32HandleInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR FenceGetWin32HandleInfoKHR(VULKAN_HPP_NAMESPACE::Fence fence_ = {}, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, fence{ fence_ }, handleType{ handleType_ }
{}
VULKAN_HPP_CONSTEXPR FenceGetWin32HandleInfoKHR( FenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
FenceGetWin32HandleInfoKHR( VkFenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: FenceGetWin32HandleInfoKHR( *reinterpret_cast<FenceGetWin32HandleInfoKHR const *>( &rhs ) )
{}
FenceGetWin32HandleInfoKHR & operator=( FenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
FenceGetWin32HandleInfoKHR & operator=( VkFenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 FenceGetWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 FenceGetWin32HandleInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
{
fence = fence_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 FenceGetWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkFenceGetWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkFenceGetWin32HandleInfoKHR*>( this );
}
operator VkFenceGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkFenceGetWin32HandleInfoKHR*>( this );
}
operator VkFenceGetWin32HandleInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkFenceGetWin32HandleInfoKHR*>( this );
}
operator VkFenceGetWin32HandleInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkFenceGetWin32HandleInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Fence const &, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, fence, handleType );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( FenceGetWin32HandleInfoKHR const & ) const = default;
#else
bool operator==( FenceGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( fence == rhs.fence )
&& ( handleType == rhs.handleType );
#endif
}
bool operator!=( FenceGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceGetWin32HandleInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Fence fence = {};
VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
};
template <>
struct CppType<StructureType, StructureType::eFenceGetWin32HandleInfoKHR>
{
using Type = FenceGetWin32HandleInfoKHR;
};
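  // The Win32 analogue of the fd export above; a sketch assuming VK_KHR_external_fence_win32
  // and a fence exportable as an opaque Win32 handle:
  //
  //   vk::FenceGetWin32HandleInfoKHR getHandleInfo{ fence, vk::ExternalFenceHandleTypeFlagBits::eOpaqueWin32 };
  //   HANDLE handle = device.getFenceWin32HandleKHR( getHandleInfo );   // caller must CloseHandle() it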
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
// wrapper struct for struct VkFilterCubicImageViewImageFormatPropertiesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkFilterCubicImageViewImageFormatPropertiesEXT.html
struct FilterCubicImageViewImageFormatPropertiesEXT
{
using NativeType = VkFilterCubicImageViewImageFormatPropertiesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFilterCubicImageViewImageFormatPropertiesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR FilterCubicImageViewImageFormatPropertiesEXT(VULKAN_HPP_NAMESPACE::Bool32 filterCubic_ = {}, VULKAN_HPP_NAMESPACE::Bool32 filterCubicMinmax_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, filterCubic{ filterCubic_ }, filterCubicMinmax{ filterCubicMinmax_ }
{}
VULKAN_HPP_CONSTEXPR FilterCubicImageViewImageFormatPropertiesEXT( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
FilterCubicImageViewImageFormatPropertiesEXT( VkFilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: FilterCubicImageViewImageFormatPropertiesEXT( *reinterpret_cast<FilterCubicImageViewImageFormatPropertiesEXT const *>( &rhs ) )
{}
FilterCubicImageViewImageFormatPropertiesEXT & operator=( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
FilterCubicImageViewImageFormatPropertiesEXT & operator=( VkFilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FilterCubicImageViewImageFormatPropertiesEXT const *>( &rhs );
return *this;
}
operator VkFilterCubicImageViewImageFormatPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkFilterCubicImageViewImageFormatPropertiesEXT*>( this );
}
operator VkFilterCubicImageViewImageFormatPropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkFilterCubicImageViewImageFormatPropertiesEXT*>( this );
}
operator VkFilterCubicImageViewImageFormatPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkFilterCubicImageViewImageFormatPropertiesEXT*>( this );
}
operator VkFilterCubicImageViewImageFormatPropertiesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkFilterCubicImageViewImageFormatPropertiesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, filterCubic, filterCubicMinmax );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( FilterCubicImageViewImageFormatPropertiesEXT const & ) const = default;
#else
bool operator==( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( filterCubic == rhs.filterCubic )
&& ( filterCubicMinmax == rhs.filterCubicMinmax );
#endif
}
bool operator!=( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFilterCubicImageViewImageFormatPropertiesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 filterCubic = {};
VULKAN_HPP_NAMESPACE::Bool32 filterCubicMinmax = {};
};
template <>
struct CppType<StructureType, StructureType::eFilterCubicImageViewImageFormatPropertiesEXT>
{
using Type = FilterCubicImageViewImageFormatPropertiesEXT;
};
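  // FilterCubicImageViewImageFormatPropertiesEXT is returned through the pNext chain of
  // vkGetPhysicalDeviceImageFormatProperties2. A hedged sketch using the StructureChain
  // overload (VK_EXT_filter_cubic assumed; the input chain must also name the view type via
  // PhysicalDeviceImageViewImageFormatInfoEXT):
  //
  //   vk::PhysicalDeviceImageViewImageFormatInfoEXT viewInfo{ vk::ImageViewType::e2D };
  //   vk::PhysicalDeviceImageFormatInfo2 formatInfo{ vk::Format::eR8G8B8A8Unorm, vk::ImageType::e2D,
  //                                                  vk::ImageTiling::eOptimal, vk::ImageUsageFlagBits::eSampled };
  //   formatInfo.setPNext( &viewInfo );
  //   auto chain = physicalDevice.getImageFormatProperties2<vk::ImageFormatProperties2,
  //                                                         vk::FilterCubicImageViewImageFormatPropertiesEXT>( formatInfo );
  //   bool cubic = static_cast<bool>( chain.get<vk::FilterCubicImageViewImageFormatPropertiesEXT>().filterCubic );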
// wrapper struct for struct VkFormatProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkFormatProperties.html
struct FormatProperties
{
using NativeType = VkFormatProperties;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR FormatProperties(VULKAN_HPP_NAMESPACE::FormatFeatureFlags linearTilingFeatures_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags optimalTilingFeatures_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags bufferFeatures_ = {}) VULKAN_HPP_NOEXCEPT
: linearTilingFeatures{ linearTilingFeatures_ }, optimalTilingFeatures{ optimalTilingFeatures_ }, bufferFeatures{ bufferFeatures_ }
{}
VULKAN_HPP_CONSTEXPR FormatProperties( FormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
FormatProperties( VkFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
: FormatProperties( *reinterpret_cast<FormatProperties const *>( &rhs ) )
{}
FormatProperties & operator=( FormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
FormatProperties & operator=( VkFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FormatProperties const *>( &rhs );
return *this;
}
operator VkFormatProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkFormatProperties*>( this );
}
operator VkFormatProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkFormatProperties*>( this );
}
operator VkFormatProperties const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkFormatProperties*>( this );
}
operator VkFormatProperties *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkFormatProperties*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::FormatFeatureFlags const &, VULKAN_HPP_NAMESPACE::FormatFeatureFlags const &, VULKAN_HPP_NAMESPACE::FormatFeatureFlags const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( linearTilingFeatures, optimalTilingFeatures, bufferFeatures );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( FormatProperties const & ) const = default;
#else
bool operator==( FormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( linearTilingFeatures == rhs.linearTilingFeatures )
&& ( optimalTilingFeatures == rhs.optimalTilingFeatures )
&& ( bufferFeatures == rhs.bufferFeatures );
#endif
}
bool operator!=( FormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::FormatFeatureFlags linearTilingFeatures = {};
VULKAN_HPP_NAMESPACE::FormatFeatureFlags optimalTilingFeatures = {};
VULKAN_HPP_NAMESPACE::FormatFeatureFlags bufferFeatures = {};
};
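  // FormatProperties is plain output data with no sType/pNext; it comes straight from
  // vkGetPhysicalDeviceFormatProperties. Sketch assuming a valid vk::PhysicalDevice:
  //
  //   vk::FormatProperties props = physicalDevice.getFormatProperties( vk::Format::eD32Sfloat );
  //   bool usableAsDepth = static_cast<bool>( props.optimalTilingFeatures &
  //                                           vk::FormatFeatureFlagBits::eDepthStencilAttachment );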
// wrapper struct for struct VkFormatProperties2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkFormatProperties2.html
struct FormatProperties2
{
using NativeType = VkFormatProperties2;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFormatProperties2;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR FormatProperties2(VULKAN_HPP_NAMESPACE::FormatProperties formatProperties_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, formatProperties{ formatProperties_ }
{}
VULKAN_HPP_CONSTEXPR FormatProperties2( FormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
FormatProperties2( VkFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
: FormatProperties2( *reinterpret_cast<FormatProperties2 const *>( &rhs ) )
{}
FormatProperties2 & operator=( FormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
FormatProperties2 & operator=( VkFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FormatProperties2 const *>( &rhs );
return *this;
}
operator VkFormatProperties2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkFormatProperties2*>( this );
}
operator VkFormatProperties2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkFormatProperties2*>( this );
}
operator VkFormatProperties2 const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkFormatProperties2*>( this );
}
operator VkFormatProperties2 *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkFormatProperties2*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::FormatProperties const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, formatProperties );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( FormatProperties2 const & ) const = default;
#else
bool operator==( FormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( formatProperties == rhs.formatProperties );
#endif
}
bool operator!=( FormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFormatProperties2;
void * pNext = {};
VULKAN_HPP_NAMESPACE::FormatProperties formatProperties = {};
};
template <>
struct CppType<StructureType, StructureType::eFormatProperties2>
{
using Type = FormatProperties2;
};
using FormatProperties2KHR = FormatProperties2;
// wrapper struct for struct VkFormatProperties3, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkFormatProperties3.html
struct FormatProperties3
{
using NativeType = VkFormatProperties3;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFormatProperties3;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR FormatProperties3(VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 linearTilingFeatures_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 optimalTilingFeatures_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 bufferFeatures_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, linearTilingFeatures{ linearTilingFeatures_ }, optimalTilingFeatures{ optimalTilingFeatures_ }, bufferFeatures{ bufferFeatures_ }
{}
VULKAN_HPP_CONSTEXPR FormatProperties3( FormatProperties3 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
FormatProperties3( VkFormatProperties3 const & rhs ) VULKAN_HPP_NOEXCEPT
: FormatProperties3( *reinterpret_cast<FormatProperties3 const *>( &rhs ) )
{}
FormatProperties3 & operator=( FormatProperties3 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
FormatProperties3 & operator=( VkFormatProperties3 const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FormatProperties3 const *>( &rhs );
return *this;
}
operator VkFormatProperties3 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkFormatProperties3*>( this );
}
operator VkFormatProperties3 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkFormatProperties3*>( this );
}
operator VkFormatProperties3 const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkFormatProperties3*>( this );
}
operator VkFormatProperties3 *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkFormatProperties3*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 const &, VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 const &, VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, linearTilingFeatures, optimalTilingFeatures, bufferFeatures );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( FormatProperties3 const & ) const = default;
#else
bool operator==( FormatProperties3 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( linearTilingFeatures == rhs.linearTilingFeatures )
&& ( optimalTilingFeatures == rhs.optimalTilingFeatures )
&& ( bufferFeatures == rhs.bufferFeatures );
#endif
}
bool operator!=( FormatProperties3 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFormatProperties3;
void * pNext = {};
VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 linearTilingFeatures = {};
VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 optimalTilingFeatures = {};
VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 bufferFeatures = {};
};
template <>
struct CppType<StructureType, StructureType::eFormatProperties3>
{
using Type = FormatProperties3;
};
using FormatProperties3KHR = FormatProperties3;
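  // FormatProperties3 widens the feature flags to 64 bits and is obtained by chaining it onto
  // FormatProperties2. Sketch using the StructureChain overload (Vulkan 1.3, or
  // VK_KHR_format_feature_flags2):
  //
  //   auto chain = physicalDevice.getFormatProperties2<vk::FormatProperties2,
  //                                                    vk::FormatProperties3>( vk::Format::eR8G8B8A8Unorm );
  //   vk::FormatFeatureFlags2 linear = chain.get<vk::FormatProperties3>().linearTilingFeatures;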
// wrapper struct for struct VkFragmentShadingRateAttachmentInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkFragmentShadingRateAttachmentInfoKHR.html
struct FragmentShadingRateAttachmentInfoKHR
{
using NativeType = VkFragmentShadingRateAttachmentInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFragmentShadingRateAttachmentInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR FragmentShadingRateAttachmentInfoKHR(const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pFragmentShadingRateAttachment_ = {}, VULKAN_HPP_NAMESPACE::Extent2D shadingRateAttachmentTexelSize_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pFragmentShadingRateAttachment{ pFragmentShadingRateAttachment_ }, shadingRateAttachmentTexelSize{ shadingRateAttachmentTexelSize_ }
{}
VULKAN_HPP_CONSTEXPR FragmentShadingRateAttachmentInfoKHR( FragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
FragmentShadingRateAttachmentInfoKHR( VkFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: FragmentShadingRateAttachmentInfoKHR( *reinterpret_cast<FragmentShadingRateAttachmentInfoKHR const *>( &rhs ) )
{}
FragmentShadingRateAttachmentInfoKHR & operator=( FragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
FragmentShadingRateAttachmentInfoKHR & operator=( VkFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FragmentShadingRateAttachmentInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 FragmentShadingRateAttachmentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 FragmentShadingRateAttachmentInfoKHR & setPFragmentShadingRateAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pFragmentShadingRateAttachment_ ) VULKAN_HPP_NOEXCEPT
{
pFragmentShadingRateAttachment = pFragmentShadingRateAttachment_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 FragmentShadingRateAttachmentInfoKHR & setShadingRateAttachmentTexelSize( VULKAN_HPP_NAMESPACE::Extent2D const & shadingRateAttachmentTexelSize_ ) VULKAN_HPP_NOEXCEPT
{
shadingRateAttachmentTexelSize = shadingRateAttachmentTexelSize_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkFragmentShadingRateAttachmentInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkFragmentShadingRateAttachmentInfoKHR*>( this );
}
operator VkFragmentShadingRateAttachmentInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkFragmentShadingRateAttachmentInfoKHR*>( this );
}
operator VkFragmentShadingRateAttachmentInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkFragmentShadingRateAttachmentInfoKHR*>( this );
}
operator VkFragmentShadingRateAttachmentInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkFragmentShadingRateAttachmentInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const VULKAN_HPP_NAMESPACE::AttachmentReference2 * const &, VULKAN_HPP_NAMESPACE::Extent2D const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pFragmentShadingRateAttachment, shadingRateAttachmentTexelSize );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( FragmentShadingRateAttachmentInfoKHR const & ) const = default;
#else
bool operator==( FragmentShadingRateAttachmentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pFragmentShadingRateAttachment == rhs.pFragmentShadingRateAttachment )
&& ( shadingRateAttachmentTexelSize == rhs.shadingRateAttachmentTexelSize );
#endif
}
bool operator!=( FragmentShadingRateAttachmentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFragmentShadingRateAttachmentInfoKHR;
const void * pNext = {};
const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pFragmentShadingRateAttachment = {};
VULKAN_HPP_NAMESPACE::Extent2D shadingRateAttachmentTexelSize = {};
};
template <>
struct CppType<StructureType, StructureType::eFragmentShadingRateAttachmentInfoKHR>
{
using Type = FragmentShadingRateAttachmentInfoKHR;
};
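  // FragmentShadingRateAttachmentInfoKHR extends SubpassDescription2; the referenced
  // attachment supplies per-texel shading rates. A hedged sketch (VK_KHR_fragment_shading_rate
  // assumed; `rateAttachmentRef` is a vk::AttachmentReference2 prepared elsewhere, and the
  // texel size must be one the implementation reports as supported):
  //
  //   vk::FragmentShadingRateAttachmentInfoKHR shadingRateInfo{ &rateAttachmentRef, vk::Extent2D{ 16, 16 } };
  //   vk::SubpassDescription2 subpass{};
  //   subpass.setPNext( &shadingRateInfo );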
// wrapper struct for struct VkFrameBoundaryEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkFrameBoundaryEXT.html
struct FrameBoundaryEXT
{
using NativeType = VkFrameBoundaryEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFrameBoundaryEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR FrameBoundaryEXT(VULKAN_HPP_NAMESPACE::FrameBoundaryFlagsEXT flags_ = {}, uint64_t frameID_ = {}, uint32_t imageCount_ = {}, const VULKAN_HPP_NAMESPACE::Image * pImages_ = {}, uint32_t bufferCount_ = {}, const VULKAN_HPP_NAMESPACE::Buffer * pBuffers_ = {}, uint64_t tagName_ = {}, size_t tagSize_ = {}, const void * pTag_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, frameID{ frameID_ }, imageCount{ imageCount_ }, pImages{ pImages_ }, bufferCount{ bufferCount_ }, pBuffers{ pBuffers_ }, tagName{ tagName_ }, tagSize{ tagSize_ }, pTag{ pTag_ }
{}
VULKAN_HPP_CONSTEXPR FrameBoundaryEXT( FrameBoundaryEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
FrameBoundaryEXT( VkFrameBoundaryEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: FrameBoundaryEXT( *reinterpret_cast<FrameBoundaryEXT const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
template <typename T>
FrameBoundaryEXT( VULKAN_HPP_NAMESPACE::FrameBoundaryFlagsEXT flags_, uint64_t frameID_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Image> const & images_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers_ = {}, uint64_t tagName_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & tag_ = {}, const void * pNext_ = nullptr )
: pNext( pNext_ ), flags( flags_ ), frameID( frameID_ ), imageCount( static_cast<uint32_t>( images_.size() ) ), pImages( images_.data() ), bufferCount( static_cast<uint32_t>( buffers_.size() ) ), pBuffers( buffers_.data() ), tagName( tagName_ ), tagSize( tag_.size() * sizeof(T) ), pTag( tag_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
FrameBoundaryEXT & operator=( FrameBoundaryEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
FrameBoundaryEXT & operator=( VkFrameBoundaryEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FrameBoundaryEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT & setFlags( VULKAN_HPP_NAMESPACE::FrameBoundaryFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT & setFrameID( uint64_t frameID_ ) VULKAN_HPP_NOEXCEPT
{
frameID = frameID_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT & setImageCount( uint32_t imageCount_ ) VULKAN_HPP_NOEXCEPT
{
imageCount = imageCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT & setPImages( const VULKAN_HPP_NAMESPACE::Image * pImages_ ) VULKAN_HPP_NOEXCEPT
{
pImages = pImages_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
FrameBoundaryEXT & setImages( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Image> const & images_ ) VULKAN_HPP_NOEXCEPT
{
imageCount = static_cast<uint32_t>( images_.size() );
pImages = images_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT & setBufferCount( uint32_t bufferCount_ ) VULKAN_HPP_NOEXCEPT
{
bufferCount = bufferCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT & setPBuffers( const VULKAN_HPP_NAMESPACE::Buffer * pBuffers_ ) VULKAN_HPP_NOEXCEPT
{
pBuffers = pBuffers_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
FrameBoundaryEXT & setBuffers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers_ ) VULKAN_HPP_NOEXCEPT
{
bufferCount = static_cast<uint32_t>( buffers_.size() );
pBuffers = buffers_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT & setTagName( uint64_t tagName_ ) VULKAN_HPP_NOEXCEPT
{
tagName = tagName_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT & setTagSize( size_t tagSize_ ) VULKAN_HPP_NOEXCEPT
{
tagSize = tagSize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT & setPTag( const void * pTag_ ) VULKAN_HPP_NOEXCEPT
{
pTag = pTag_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
template <typename T>
FrameBoundaryEXT & setTag( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & tag_ ) VULKAN_HPP_NOEXCEPT
{
tagSize = tag_.size() * sizeof(T);
pTag = tag_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkFrameBoundaryEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkFrameBoundaryEXT*>( this );
}
operator VkFrameBoundaryEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkFrameBoundaryEXT*>( this );
}
operator VkFrameBoundaryEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkFrameBoundaryEXT*>( this );
}
operator VkFrameBoundaryEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkFrameBoundaryEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::FrameBoundaryFlagsEXT const &, uint64_t const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Image * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Buffer * const &, uint64_t const &, size_t const &, const void * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, frameID, imageCount, pImages, bufferCount, pBuffers, tagName, tagSize, pTag );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( FrameBoundaryEXT const & ) const = default;
#else
bool operator==( FrameBoundaryEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( frameID == rhs.frameID )
&& ( imageCount == rhs.imageCount )
&& ( pImages == rhs.pImages )
&& ( bufferCount == rhs.bufferCount )
&& ( pBuffers == rhs.pBuffers )
&& ( tagName == rhs.tagName )
&& ( tagSize == rhs.tagSize )
&& ( pTag == rhs.pTag );
#endif
}
bool operator!=( FrameBoundaryEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFrameBoundaryEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::FrameBoundaryFlagsEXT flags = {};
uint64_t frameID = {};
uint32_t imageCount = {};
const VULKAN_HPP_NAMESPACE::Image * pImages = {};
uint32_t bufferCount = {};
const VULKAN_HPP_NAMESPACE::Buffer * pBuffers = {};
uint64_t tagName = {};
size_t tagSize = {};
const void * pTag = {};
};
template <>
struct CppType<StructureType, StructureType::eFrameBoundaryEXT>
{
using Type = FrameBoundaryEXT;
};
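  // FrameBoundaryEXT (VK_EXT_frame_boundary) lets capture and debug tools correlate queue
  // submissions with logical frames. A hedged sketch in which `frameIndex` is the
  // application's frame counter and `frameImage` is an lvalue vk::Image holding the frame's
  // result (a single object binds to the ArrayProxy setter):
  //
  //   vk::FrameBoundaryEXT boundary{};
  //   boundary.setFlags( vk::FrameBoundaryFlagBitsEXT::eFrameEnd )
  //           .setFrameID( frameIndex )
  //           .setImages( frameImage );
  //   vk::SubmitInfo submitInfo{};          // command buffers etc. set as usual
  //   submitInfo.setPNext( &boundary );     // chain onto the frame's last submission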
// wrapper struct for struct VkFrameBoundaryTensorsARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkFrameBoundaryTensorsARM.html
struct FrameBoundaryTensorsARM
{
using NativeType = VkFrameBoundaryTensorsARM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFrameBoundaryTensorsARM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR FrameBoundaryTensorsARM(uint32_t tensorCount_ = {}, const VULKAN_HPP_NAMESPACE::TensorARM * pTensors_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, tensorCount{ tensorCount_ }, pTensors{ pTensors_ }
{}
VULKAN_HPP_CONSTEXPR FrameBoundaryTensorsARM( FrameBoundaryTensorsARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
FrameBoundaryTensorsARM( VkFrameBoundaryTensorsARM const & rhs ) VULKAN_HPP_NOEXCEPT
: FrameBoundaryTensorsARM( *reinterpret_cast<FrameBoundaryTensorsARM const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
FrameBoundaryTensorsARM( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::TensorARM> const & tensors_, const void * pNext_ = nullptr )
: pNext( pNext_ ), tensorCount( static_cast<uint32_t>( tensors_.size() ) ), pTensors( tensors_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
FrameBoundaryTensorsARM & operator=( FrameBoundaryTensorsARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
FrameBoundaryTensorsARM & operator=( VkFrameBoundaryTensorsARM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FrameBoundaryTensorsARM const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 FrameBoundaryTensorsARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 FrameBoundaryTensorsARM & setTensorCount( uint32_t tensorCount_ ) VULKAN_HPP_NOEXCEPT
{
tensorCount = tensorCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 FrameBoundaryTensorsARM & setPTensors( const VULKAN_HPP_NAMESPACE::TensorARM * pTensors_ ) VULKAN_HPP_NOEXCEPT
{
pTensors = pTensors_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
FrameBoundaryTensorsARM & setTensors( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::TensorARM> const & tensors_ ) VULKAN_HPP_NOEXCEPT
{
tensorCount = static_cast<uint32_t>( tensors_.size() );
pTensors = tensors_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkFrameBoundaryTensorsARM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkFrameBoundaryTensorsARM*>( this );
}
operator VkFrameBoundaryTensorsARM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkFrameBoundaryTensorsARM*>( this );
}
operator VkFrameBoundaryTensorsARM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkFrameBoundaryTensorsARM*>( this );
}
operator VkFrameBoundaryTensorsARM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkFrameBoundaryTensorsARM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::TensorARM * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, tensorCount, pTensors );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( FrameBoundaryTensorsARM const & ) const = default;
#else
bool operator==( FrameBoundaryTensorsARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( tensorCount == rhs.tensorCount )
&& ( pTensors == rhs.pTensors );
#endif
}
bool operator!=( FrameBoundaryTensorsARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFrameBoundaryTensorsARM;
const void * pNext = {};
uint32_t tensorCount = {};
const VULKAN_HPP_NAMESPACE::TensorARM * pTensors = {};
};
template <>
struct CppType<StructureType, StructureType::eFrameBoundaryTensorsARM>
{
using Type = FrameBoundaryTensorsARM;
};
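  // Usage sketch (editorial comment, not part of the generated API): under VK_ARM_tensors
  // together with VK_EXT_frame_boundary, this struct is expected to be chained into a
  // FrameBoundaryEXT pNext chain to associate tensors with a frame boundary. A minimal,
  // hedged example — the tensor handles are assumed to exist already:
  //
  //   vk::TensorARM tensors[2] = { tensorA, tensorB };              // hypothetical handles
  //   vk::FrameBoundaryTensorsARM frameBoundaryTensors{ tensors };  // enhanced-mode ArrayProxy ctor
  //   vk::FrameBoundaryEXT frameBoundary;
  //   frameBoundary.pNext = &frameBoundaryTensors;                  // chain into the frame boundary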
// wrapper struct for struct VkFramebufferAttachmentImageInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkFramebufferAttachmentImageInfo.html
struct FramebufferAttachmentImageInfo
{
using NativeType = VkFramebufferAttachmentImageInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferAttachmentImageInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR FramebufferAttachmentImageInfo(VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, uint32_t width_ = {}, uint32_t height_ = {}, uint32_t layerCount_ = {}, uint32_t viewFormatCount_ = {}, const VULKAN_HPP_NAMESPACE::Format * pViewFormats_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, usage{ usage_ }, width{ width_ }, height{ height_ }, layerCount{ layerCount_ }, viewFormatCount{ viewFormatCount_ }, pViewFormats{ pViewFormats_ }
{}
VULKAN_HPP_CONSTEXPR FramebufferAttachmentImageInfo( FramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
FramebufferAttachmentImageInfo( VkFramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: FramebufferAttachmentImageInfo( *reinterpret_cast<FramebufferAttachmentImageInfo const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
FramebufferAttachmentImageInfo( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_, uint32_t width_, uint32_t height_, uint32_t layerCount_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & viewFormats_, const void * pNext_ = nullptr )
: pNext( pNext_ ), flags( flags_ ), usage( usage_ ), width( width_ ), height( height_ ), layerCount( layerCount_ ), viewFormatCount( static_cast<uint32_t>( viewFormats_.size() ) ), pViewFormats( viewFormats_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
FramebufferAttachmentImageInfo & operator=( FramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
FramebufferAttachmentImageInfo & operator=( VkFramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
{
usage = usage_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
{
width = width_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
{
height = height_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
{
layerCount = layerCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setViewFormatCount( uint32_t viewFormatCount_ ) VULKAN_HPP_NOEXCEPT
{
viewFormatCount = viewFormatCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setPViewFormats( const VULKAN_HPP_NAMESPACE::Format * pViewFormats_ ) VULKAN_HPP_NOEXCEPT
{
pViewFormats = pViewFormats_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
FramebufferAttachmentImageInfo & setViewFormats( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & viewFormats_ ) VULKAN_HPP_NOEXCEPT
{
viewFormatCount = static_cast<uint32_t>( viewFormats_.size() );
pViewFormats = viewFormats_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkFramebufferAttachmentImageInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkFramebufferAttachmentImageInfo*>( this );
}
operator VkFramebufferAttachmentImageInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkFramebufferAttachmentImageInfo*>( this );
}
operator VkFramebufferAttachmentImageInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkFramebufferAttachmentImageInfo*>( this );
}
operator VkFramebufferAttachmentImageInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkFramebufferAttachmentImageInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageCreateFlags const &, VULKAN_HPP_NAMESPACE::ImageUsageFlags const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Format * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, usage, width, height, layerCount, viewFormatCount, pViewFormats );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( FramebufferAttachmentImageInfo const & ) const = default;
#else
bool operator==( FramebufferAttachmentImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( usage == rhs.usage )
&& ( width == rhs.width )
&& ( height == rhs.height )
&& ( layerCount == rhs.layerCount )
&& ( viewFormatCount == rhs.viewFormatCount )
&& ( pViewFormats == rhs.pViewFormats );
#endif
}
bool operator!=( FramebufferAttachmentImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferAttachmentImageInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {};
VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {};
uint32_t width = {};
uint32_t height = {};
uint32_t layerCount = {};
uint32_t viewFormatCount = {};
const VULKAN_HPP_NAMESPACE::Format * pViewFormats = {};
};
template <>
struct CppType<StructureType, StructureType::eFramebufferAttachmentImageInfo>
{
using Type = FramebufferAttachmentImageInfo;
};
using FramebufferAttachmentImageInfoKHR = FramebufferAttachmentImageInfo;
// wrapper struct for struct VkFramebufferAttachmentsCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkFramebufferAttachmentsCreateInfo.html
struct FramebufferAttachmentsCreateInfo
{
using NativeType = VkFramebufferAttachmentsCreateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferAttachmentsCreateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR FramebufferAttachmentsCreateInfo(uint32_t attachmentImageInfoCount_ = {}, const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo * pAttachmentImageInfos_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, attachmentImageInfoCount{ attachmentImageInfoCount_ }, pAttachmentImageInfos{ pAttachmentImageInfos_ }
{}
VULKAN_HPP_CONSTEXPR FramebufferAttachmentsCreateInfo( FramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
FramebufferAttachmentsCreateInfo( VkFramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: FramebufferAttachmentsCreateInfo( *reinterpret_cast<FramebufferAttachmentsCreateInfo const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
FramebufferAttachmentsCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo> const & attachmentImageInfos_, const void * pNext_ = nullptr )
: pNext( pNext_ ), attachmentImageInfoCount( static_cast<uint32_t>( attachmentImageInfos_.size() ) ), pAttachmentImageInfos( attachmentImageInfos_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
FramebufferAttachmentsCreateInfo & operator=( FramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
FramebufferAttachmentsCreateInfo & operator=( VkFramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentsCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentsCreateInfo & setAttachmentImageInfoCount( uint32_t attachmentImageInfoCount_ ) VULKAN_HPP_NOEXCEPT
{
attachmentImageInfoCount = attachmentImageInfoCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentsCreateInfo & setPAttachmentImageInfos( const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo * pAttachmentImageInfos_ ) VULKAN_HPP_NOEXCEPT
{
pAttachmentImageInfos = pAttachmentImageInfos_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
FramebufferAttachmentsCreateInfo & setAttachmentImageInfos( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo> const & attachmentImageInfos_ ) VULKAN_HPP_NOEXCEPT
{
attachmentImageInfoCount = static_cast<uint32_t>( attachmentImageInfos_.size() );
pAttachmentImageInfos = attachmentImageInfos_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkFramebufferAttachmentsCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkFramebufferAttachmentsCreateInfo*>( this );
}
operator VkFramebufferAttachmentsCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkFramebufferAttachmentsCreateInfo*>( this );
}
operator VkFramebufferAttachmentsCreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkFramebufferAttachmentsCreateInfo*>( this );
}
operator VkFramebufferAttachmentsCreateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkFramebufferAttachmentsCreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, attachmentImageInfoCount, pAttachmentImageInfos );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( FramebufferAttachmentsCreateInfo const & ) const = default;
#else
bool operator==( FramebufferAttachmentsCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( attachmentImageInfoCount == rhs.attachmentImageInfoCount )
&& ( pAttachmentImageInfos == rhs.pAttachmentImageInfos );
#endif
}
bool operator!=( FramebufferAttachmentsCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferAttachmentsCreateInfo;
const void * pNext = {};
uint32_t attachmentImageInfoCount = {};
const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo * pAttachmentImageInfos = {};
};
template <>
struct CppType<StructureType, StructureType::eFramebufferAttachmentsCreateInfo>
{
using Type = FramebufferAttachmentsCreateInfo;
};
using FramebufferAttachmentsCreateInfoKHR = FramebufferAttachmentsCreateInfo;
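  // Usage sketch (editorial, hedged): with imageless framebuffers (Vulkan 1.2 /
  // VK_KHR_imageless_framebuffer), FramebufferAttachmentsCreateInfo is chained into
  // FramebufferCreateInfo::pNext and describes each attachment that is bound later, at
  // render-pass begin time. Format and dimensions below are placeholders:
  //
  //   vk::Format viewFormats[] = { vk::Format::eB8G8R8A8Unorm };
  //   vk::FramebufferAttachmentImageInfo attachmentInfo{
  //     {}, vk::ImageUsageFlagBits::eColorAttachment, 1280, 720, 1, viewFormats };
  //   vk::FramebufferAttachmentsCreateInfo attachmentsInfo{ attachmentInfo };
  //   // ... chain &attachmentsInfo into a FramebufferCreateInfo, see the sketch below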
// wrapper struct for struct VkFramebufferCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkFramebufferCreateInfo.html
struct FramebufferCreateInfo
{
using NativeType = VkFramebufferCreateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferCreateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR FramebufferCreateInfo(VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, uint32_t attachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::ImageView * pAttachments_ = {}, uint32_t width_ = {}, uint32_t height_ = {}, uint32_t layers_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, renderPass{ renderPass_ }, attachmentCount{ attachmentCount_ }, pAttachments{ pAttachments_ }, width{ width_ }, height{ height_ }, layers{ layers_ }
{}
VULKAN_HPP_CONSTEXPR FramebufferCreateInfo( FramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
FramebufferCreateInfo( VkFramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: FramebufferCreateInfo( *reinterpret_cast<FramebufferCreateInfo const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
FramebufferCreateInfo( VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_, VULKAN_HPP_NAMESPACE::RenderPass renderPass_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageView> const & attachments_, uint32_t width_ = {}, uint32_t height_ = {}, uint32_t layers_ = {}, const void * pNext_ = nullptr )
: pNext( pNext_ ), flags( flags_ ), renderPass( renderPass_ ), attachmentCount( static_cast<uint32_t>( attachments_.size() ) ), pAttachments( attachments_.data() ), width( width_ ), height( height_ ), layers( layers_ )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
FramebufferCreateInfo & operator=( FramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
FramebufferCreateInfo & operator=( VkFramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FramebufferCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
{
renderPass = renderPass_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
{
attachmentCount = attachmentCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::ImageView * pAttachments_ ) VULKAN_HPP_NOEXCEPT
{
pAttachments = pAttachments_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
FramebufferCreateInfo & setAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageView> const & attachments_ ) VULKAN_HPP_NOEXCEPT
{
attachmentCount = static_cast<uint32_t>( attachments_.size() );
pAttachments = attachments_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
{
width = width_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
{
height = height_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setLayers( uint32_t layers_ ) VULKAN_HPP_NOEXCEPT
{
layers = layers_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkFramebufferCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkFramebufferCreateInfo*>( this );
}
operator VkFramebufferCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkFramebufferCreateInfo*>( this );
}
operator VkFramebufferCreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkFramebufferCreateInfo*>( this );
}
operator VkFramebufferCreateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkFramebufferCreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::FramebufferCreateFlags const &, VULKAN_HPP_NAMESPACE::RenderPass const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::ImageView * const &, uint32_t const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, renderPass, attachmentCount, pAttachments, width, height, layers );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( FramebufferCreateInfo const & ) const = default;
#else
bool operator==( FramebufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( renderPass == rhs.renderPass )
&& ( attachmentCount == rhs.attachmentCount )
&& ( pAttachments == rhs.pAttachments )
&& ( width == rhs.width )
&& ( height == rhs.height )
&& ( layers == rhs.layers );
#endif
}
bool operator!=( FramebufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferCreateInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags = {};
VULKAN_HPP_NAMESPACE::RenderPass renderPass = {};
uint32_t attachmentCount = {};
const VULKAN_HPP_NAMESPACE::ImageView * pAttachments = {};
uint32_t width = {};
uint32_t height = {};
uint32_t layers = {};
};
template <>
struct CppType<StructureType, StructureType::eFramebufferCreateInfo>
{
using Type = FramebufferCreateInfo;
};
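  // Usage sketch (editorial, hedged): a conventional framebuffer pairs one ImageView per
  // render-pass attachment with the render area extent; the handles below are assumed to
  // exist already:
  //
  //   vk::ImageView attachments[] = { colorView, depthView };       // hypothetical views
  //   vk::FramebufferCreateInfo createInfo{ {}, renderPass, attachments, width, height, 1 };
  //   vk::Framebuffer framebuffer = device.createFramebuffer( createInfo );
  //
  // For an imageless framebuffer, set vk::FramebufferCreateFlagBits::eImageless, chain a
  // FramebufferAttachmentsCreateInfo (see the sketch above) into pNext, and bind the actual
  // image views at render-pass begin time instead.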
// wrapper struct for struct VkFramebufferMixedSamplesCombinationNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkFramebufferMixedSamplesCombinationNV.html
struct FramebufferMixedSamplesCombinationNV
{
using NativeType = VkFramebufferMixedSamplesCombinationNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferMixedSamplesCombinationNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR FramebufferMixedSamplesCombinationNV(VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge, VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::SampleCountFlags depthStencilSamples_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags colorSamples_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, coverageReductionMode{ coverageReductionMode_ }, rasterizationSamples{ rasterizationSamples_ }, depthStencilSamples{ depthStencilSamples_ }, colorSamples{ colorSamples_ }
{}
VULKAN_HPP_CONSTEXPR FramebufferMixedSamplesCombinationNV( FramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
FramebufferMixedSamplesCombinationNV( VkFramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT
: FramebufferMixedSamplesCombinationNV( *reinterpret_cast<FramebufferMixedSamplesCombinationNV const *>( &rhs ) )
{}
FramebufferMixedSamplesCombinationNV & operator=( FramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
FramebufferMixedSamplesCombinationNV & operator=( VkFramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV const *>( &rhs );
return *this;
}
operator VkFramebufferMixedSamplesCombinationNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkFramebufferMixedSamplesCombinationNV*>( this );
}
operator VkFramebufferMixedSamplesCombinationNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkFramebufferMixedSamplesCombinationNV*>( this );
}
operator VkFramebufferMixedSamplesCombinationNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkFramebufferMixedSamplesCombinationNV*>( this );
}
operator VkFramebufferMixedSamplesCombinationNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkFramebufferMixedSamplesCombinationNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::CoverageReductionModeNV const &, VULKAN_HPP_NAMESPACE::SampleCountFlagBits const &, VULKAN_HPP_NAMESPACE::SampleCountFlags const &, VULKAN_HPP_NAMESPACE::SampleCountFlags const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, coverageReductionMode, rasterizationSamples, depthStencilSamples, colorSamples );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( FramebufferMixedSamplesCombinationNV const & ) const = default;
#else
bool operator==( FramebufferMixedSamplesCombinationNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( coverageReductionMode == rhs.coverageReductionMode )
&& ( rasterizationSamples == rhs.rasterizationSamples )
&& ( depthStencilSamples == rhs.depthStencilSamples )
&& ( colorSamples == rhs.colorSamples );
#endif
}
bool operator!=( FramebufferMixedSamplesCombinationNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferMixedSamplesCombinationNV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge;
VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
VULKAN_HPP_NAMESPACE::SampleCountFlags depthStencilSamples = {};
VULKAN_HPP_NAMESPACE::SampleCountFlags colorSamples = {};
};
template <>
struct CppType<StructureType, StructureType::eFramebufferMixedSamplesCombinationNV>
{
using Type = FramebufferMixedSamplesCombinationNV;
};
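  // Usage sketch (editorial, hedged): this is a read-only properties struct; the
  // implementation fills it in when enumerating supported combinations under
  // VK_NV_coverage_reduction_mode, e.g. in enhanced mode:
  //
  //   std::vector<vk::FramebufferMixedSamplesCombinationNV> combinations =
  //     physicalDevice.getSupportedFramebufferMixedSamplesCombinationsNV();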
// wrapper struct for struct VkGeneratedCommandsInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkGeneratedCommandsInfoEXT.html
struct GeneratedCommandsInfoEXT
{
using NativeType = VkGeneratedCommandsInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeneratedCommandsInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR GeneratedCommandsInfoEXT(VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStages_ = {}, VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet_ = {}, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout_ = {}, VULKAN_HPP_NAMESPACE::DeviceAddress indirectAddress_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize indirectAddressSize_ = {}, VULKAN_HPP_NAMESPACE::DeviceAddress preprocessAddress_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ = {}, uint32_t maxSequenceCount_ = {}, VULKAN_HPP_NAMESPACE::DeviceAddress sequenceCountAddress_ = {}, uint32_t maxDrawCount_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, shaderStages{ shaderStages_ }, indirectExecutionSet{ indirectExecutionSet_ }, indirectCommandsLayout{ indirectCommandsLayout_ }, indirectAddress{ indirectAddress_ }, indirectAddressSize{ indirectAddressSize_ }, preprocessAddress{ preprocessAddress_ }, preprocessSize{ preprocessSize_ }, maxSequenceCount{ maxSequenceCount_ }, sequenceCountAddress{ sequenceCountAddress_ }, maxDrawCount{ maxDrawCount_ }
{}
VULKAN_HPP_CONSTEXPR GeneratedCommandsInfoEXT( GeneratedCommandsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
GeneratedCommandsInfoEXT( VkGeneratedCommandsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: GeneratedCommandsInfoEXT( *reinterpret_cast<GeneratedCommandsInfoEXT const *>( &rhs ) )
{}
GeneratedCommandsInfoEXT & operator=( GeneratedCommandsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
GeneratedCommandsInfoEXT & operator=( VkGeneratedCommandsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & setShaderStages( VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStages_ ) VULKAN_HPP_NOEXCEPT
{
shaderStages = shaderStages_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & setIndirectExecutionSet( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet_ ) VULKAN_HPP_NOEXCEPT
{
indirectExecutionSet = indirectExecutionSet_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & setIndirectCommandsLayout( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT
{
indirectCommandsLayout = indirectCommandsLayout_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & setIndirectAddress( VULKAN_HPP_NAMESPACE::DeviceAddress indirectAddress_ ) VULKAN_HPP_NOEXCEPT
{
indirectAddress = indirectAddress_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & setIndirectAddressSize( VULKAN_HPP_NAMESPACE::DeviceSize indirectAddressSize_ ) VULKAN_HPP_NOEXCEPT
{
indirectAddressSize = indirectAddressSize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & setPreprocessAddress( VULKAN_HPP_NAMESPACE::DeviceAddress preprocessAddress_ ) VULKAN_HPP_NOEXCEPT
{
preprocessAddress = preprocessAddress_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & setPreprocessSize( VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ ) VULKAN_HPP_NOEXCEPT
{
preprocessSize = preprocessSize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & setMaxSequenceCount( uint32_t maxSequenceCount_ ) VULKAN_HPP_NOEXCEPT
{
maxSequenceCount = maxSequenceCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & setSequenceCountAddress( VULKAN_HPP_NAMESPACE::DeviceAddress sequenceCountAddress_ ) VULKAN_HPP_NOEXCEPT
{
sequenceCountAddress = sequenceCountAddress_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & setMaxDrawCount( uint32_t maxDrawCount_ ) VULKAN_HPP_NOEXCEPT
{
maxDrawCount = maxDrawCount_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkGeneratedCommandsInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkGeneratedCommandsInfoEXT*>( this );
}
operator VkGeneratedCommandsInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkGeneratedCommandsInfoEXT*>( this );
}
operator VkGeneratedCommandsInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkGeneratedCommandsInfoEXT*>( this );
}
operator VkGeneratedCommandsInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkGeneratedCommandsInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ShaderStageFlags const &, VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT const &, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, shaderStages, indirectExecutionSet, indirectCommandsLayout, indirectAddress, indirectAddressSize, preprocessAddress, preprocessSize, maxSequenceCount, sequenceCountAddress, maxDrawCount );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( GeneratedCommandsInfoEXT const & ) const = default;
#else
bool operator==( GeneratedCommandsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( shaderStages == rhs.shaderStages )
&& ( indirectExecutionSet == rhs.indirectExecutionSet )
&& ( indirectCommandsLayout == rhs.indirectCommandsLayout )
&& ( indirectAddress == rhs.indirectAddress )
&& ( indirectAddressSize == rhs.indirectAddressSize )
&& ( preprocessAddress == rhs.preprocessAddress )
&& ( preprocessSize == rhs.preprocessSize )
&& ( maxSequenceCount == rhs.maxSequenceCount )
&& ( sequenceCountAddress == rhs.sequenceCountAddress )
&& ( maxDrawCount == rhs.maxDrawCount );
#endif
}
bool operator!=( GeneratedCommandsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeneratedCommandsInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStages = {};
VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet = {};
VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout = {};
VULKAN_HPP_NAMESPACE::DeviceAddress indirectAddress = {};
VULKAN_HPP_NAMESPACE::DeviceSize indirectAddressSize = {};
VULKAN_HPP_NAMESPACE::DeviceAddress preprocessAddress = {};
VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize = {};
uint32_t maxSequenceCount = {};
VULKAN_HPP_NAMESPACE::DeviceAddress sequenceCountAddress = {};
uint32_t maxDrawCount = {};
};
template <>
struct CppType<StructureType, StructureType::eGeneratedCommandsInfoEXT>
{
using Type = GeneratedCommandsInfoEXT;
};
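  // Usage sketch (editorial, hedged): under VK_EXT_device_generated_commands the filled-in
  // struct is consumed by vkCmdExecuteGeneratedCommandsEXT; the addresses, sizes, and
  // handles below are assumed to have been set up beforehand:
  //
  //   vk::GeneratedCommandsInfoEXT info{};
  //   info.setShaderStages( vk::ShaderStageFlagBits::eVertex | vk::ShaderStageFlagBits::eFragment )
  //       .setIndirectCommandsLayout( indirectCommandsLayout )      // hypothetical handle
  //       .setIndirectAddress( indirectAddr ).setIndirectAddressSize( indirectSize )
  //       .setPreprocessAddress( preprocessAddr ).setPreprocessSize( preprocessSize )
  //       .setMaxSequenceCount( maxSequences );
  //   commandBuffer.executeGeneratedCommandsEXT( vk::False, info ); // isPreprocessed = false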
// wrapper struct for struct VkIndirectCommandsStreamNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectCommandsStreamNV.html
struct IndirectCommandsStreamNV
{
using NativeType = VkIndirectCommandsStreamNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR IndirectCommandsStreamNV(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}) VULKAN_HPP_NOEXCEPT
: buffer{ buffer_ }, offset{ offset_ }
{}
VULKAN_HPP_CONSTEXPR IndirectCommandsStreamNV( IndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
IndirectCommandsStreamNV( VkIndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT
: IndirectCommandsStreamNV( *reinterpret_cast<IndirectCommandsStreamNV const *>( &rhs ) )
{}
IndirectCommandsStreamNV & operator=( IndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
IndirectCommandsStreamNV & operator=( VkIndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsStreamNV & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
{
buffer = buffer_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsStreamNV & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
{
offset = offset_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkIndirectCommandsStreamNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkIndirectCommandsStreamNV*>( this );
}
operator VkIndirectCommandsStreamNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkIndirectCommandsStreamNV*>( this );
}
operator VkIndirectCommandsStreamNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkIndirectCommandsStreamNV*>( this );
}
operator VkIndirectCommandsStreamNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkIndirectCommandsStreamNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::Buffer const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( buffer, offset );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( IndirectCommandsStreamNV const & ) const = default;
#else
bool operator==( IndirectCommandsStreamNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( buffer == rhs.buffer )
&& ( offset == rhs.offset );
#endif
}
bool operator!=( IndirectCommandsStreamNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::Buffer buffer = {};
VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
};
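  // Usage sketch (editorial, hedged): each stream names a buffer region holding one token
  // stream for the NV device-generated-commands path; the buffer handle is assumed:
  //
  //   vk::IndirectCommandsStreamNV stream{ tokenBuffer, 0 };        // hypothetical buffer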
// wrapper struct for struct VkGeneratedCommandsInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkGeneratedCommandsInfoNV.html
struct GeneratedCommandsInfoNV
{
using NativeType = VkGeneratedCommandsInfoNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeneratedCommandsInfoNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR GeneratedCommandsInfoNV(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ = {}, uint32_t streamCount_ = {}, const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV * pStreams_ = {}, uint32_t sequencesCount_ = {}, VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ = {}, VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ = {}, VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pipelineBindPoint{ pipelineBindPoint_ }, pipeline{ pipeline_ }, indirectCommandsLayout{ indirectCommandsLayout_ }, streamCount{ streamCount_ }, pStreams{ pStreams_ }, sequencesCount{ sequencesCount_ }, preprocessBuffer{ preprocessBuffer_ }, preprocessOffset{ preprocessOffset_ }, preprocessSize{ preprocessSize_ }, sequencesCountBuffer{ sequencesCountBuffer_ }, sequencesCountOffset{ sequencesCountOffset_ }, sequencesIndexBuffer{ sequencesIndexBuffer_ }, sequencesIndexOffset{ sequencesIndexOffset_ }
{}
VULKAN_HPP_CONSTEXPR GeneratedCommandsInfoNV( GeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
GeneratedCommandsInfoNV( VkGeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: GeneratedCommandsInfoNV( *reinterpret_cast<GeneratedCommandsInfoNV const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
GeneratedCommandsInfoNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, VULKAN_HPP_NAMESPACE::Pipeline pipeline_, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV> const & streams_, uint32_t sequencesCount_ = {}, VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ = {}, VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ = {}, VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ = {}, const void * pNext_ = nullptr )
: pNext( pNext_ ), pipelineBindPoint( pipelineBindPoint_ ), pipeline( pipeline_ ), indirectCommandsLayout( indirectCommandsLayout_ ), streamCount( static_cast<uint32_t>( streams_.size() ) ), pStreams( streams_.data() ), sequencesCount( sequencesCount_ ), preprocessBuffer( preprocessBuffer_ ), preprocessOffset( preprocessOffset_ ), preprocessSize( preprocessSize_ ), sequencesCountBuffer( sequencesCountBuffer_ ), sequencesCountOffset( sequencesCountOffset_ ), sequencesIndexBuffer( sequencesIndexBuffer_ ), sequencesIndexOffset( sequencesIndexOffset_ )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
GeneratedCommandsInfoNV & operator=( GeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
GeneratedCommandsInfoNV & operator=( VkGeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
{
pipelineBindPoint = pipelineBindPoint_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
{
pipeline = pipeline_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setIndirectCommandsLayout( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT
{
indirectCommandsLayout = indirectCommandsLayout_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setStreamCount( uint32_t streamCount_ ) VULKAN_HPP_NOEXCEPT
{
streamCount = streamCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPStreams( const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV * pStreams_ ) VULKAN_HPP_NOEXCEPT
{
pStreams = pStreams_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
GeneratedCommandsInfoNV & setStreams( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV> const & streams_ ) VULKAN_HPP_NOEXCEPT
{
streamCount = static_cast<uint32_t>( streams_.size() );
pStreams = streams_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setSequencesCount( uint32_t sequencesCount_ ) VULKAN_HPP_NOEXCEPT
{
sequencesCount = sequencesCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPreprocessBuffer( VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ ) VULKAN_HPP_NOEXCEPT
{
preprocessBuffer = preprocessBuffer_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPreprocessOffset( VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ ) VULKAN_HPP_NOEXCEPT
{
preprocessOffset = preprocessOffset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPreprocessSize( VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ ) VULKAN_HPP_NOEXCEPT
{
preprocessSize = preprocessSize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setSequencesCountBuffer( VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ ) VULKAN_HPP_NOEXCEPT
{
sequencesCountBuffer = sequencesCountBuffer_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setSequencesCountOffset( VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ ) VULKAN_HPP_NOEXCEPT
{
sequencesCountOffset = sequencesCountOffset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setSequencesIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ ) VULKAN_HPP_NOEXCEPT
{
sequencesIndexBuffer = sequencesIndexBuffer_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setSequencesIndexOffset( VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ ) VULKAN_HPP_NOEXCEPT
{
sequencesIndexOffset = sequencesIndexOffset_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkGeneratedCommandsInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkGeneratedCommandsInfoNV*>( this );
}
operator VkGeneratedCommandsInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkGeneratedCommandsInfoNV*>( this );
}
operator VkGeneratedCommandsInfoNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkGeneratedCommandsInfoNV*>( this );
}
operator VkGeneratedCommandsInfoNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkGeneratedCommandsInfoNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineBindPoint const &, VULKAN_HPP_NAMESPACE::Pipeline const &, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Buffer const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::Buffer const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::Buffer const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pipelineBindPoint, pipeline, indirectCommandsLayout, streamCount, pStreams, sequencesCount, preprocessBuffer, preprocessOffset, preprocessSize, sequencesCountBuffer, sequencesCountOffset, sequencesIndexBuffer, sequencesIndexOffset );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( GeneratedCommandsInfoNV const & ) const = default;
#else
bool operator==( GeneratedCommandsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pipelineBindPoint == rhs.pipelineBindPoint )
&& ( pipeline == rhs.pipeline )
&& ( indirectCommandsLayout == rhs.indirectCommandsLayout )
&& ( streamCount == rhs.streamCount )
&& ( pStreams == rhs.pStreams )
&& ( sequencesCount == rhs.sequencesCount )
&& ( preprocessBuffer == rhs.preprocessBuffer )
&& ( preprocessOffset == rhs.preprocessOffset )
&& ( preprocessSize == rhs.preprocessSize )
&& ( sequencesCountBuffer == rhs.sequencesCountBuffer )
&& ( sequencesCountOffset == rhs.sequencesCountOffset )
&& ( sequencesIndexBuffer == rhs.sequencesIndexBuffer )
&& ( sequencesIndexOffset == rhs.sequencesIndexOffset );
#endif
}
bool operator!=( GeneratedCommandsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeneratedCommandsInfoNV;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
VULKAN_HPP_NAMESPACE::Pipeline pipeline = {};
VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout = {};
uint32_t streamCount = {};
const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV * pStreams = {};
uint32_t sequencesCount = {};
VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer = {};
VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset = {};
VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize = {};
VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer = {};
VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset = {};
VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer = {};
VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset = {};
};
template <>
struct CppType<StructureType, StructureType::eGeneratedCommandsInfoNV>
{
using Type = GeneratedCommandsInfoNV;
};
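  // Usage sketch (editorial, hedged): the NV path is consumed by
  // vkCmdExecuteGeneratedCommandsNV; handles and buffers below are assumed to exist:
  //
  //   vk::IndirectCommandsStreamNV streams[] = { { tokenBuffer, 0 } };
  //   vk::GeneratedCommandsInfoNV info{ vk::PipelineBindPoint::eGraphics, pipeline,
  //     indirectCommandsLayout, streams, sequencesCount, preprocessBuffer, 0, preprocessSize };
  //   commandBuffer.executeGeneratedCommandsNV( vk::False, info );  // isPreprocessed = false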
// wrapper struct for struct VkGeneratedCommandsMemoryRequirementsInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkGeneratedCommandsMemoryRequirementsInfoEXT.html
struct GeneratedCommandsMemoryRequirementsInfoEXT
{
using NativeType = VkGeneratedCommandsMemoryRequirementsInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeneratedCommandsMemoryRequirementsInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR GeneratedCommandsMemoryRequirementsInfoEXT(VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet_ = {}, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout_ = {}, uint32_t maxSequenceCount_ = {}, uint32_t maxDrawCount_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, indirectExecutionSet{ indirectExecutionSet_ }, indirectCommandsLayout{ indirectCommandsLayout_ }, maxSequenceCount{ maxSequenceCount_ }, maxDrawCount{ maxDrawCount_ }
{}
VULKAN_HPP_CONSTEXPR GeneratedCommandsMemoryRequirementsInfoEXT( GeneratedCommandsMemoryRequirementsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
GeneratedCommandsMemoryRequirementsInfoEXT( VkGeneratedCommandsMemoryRequirementsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: GeneratedCommandsMemoryRequirementsInfoEXT( *reinterpret_cast<GeneratedCommandsMemoryRequirementsInfoEXT const *>( &rhs ) )
{}
GeneratedCommandsMemoryRequirementsInfoEXT & operator=( GeneratedCommandsMemoryRequirementsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
GeneratedCommandsMemoryRequirementsInfoEXT & operator=( VkGeneratedCommandsMemoryRequirementsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoEXT & setIndirectExecutionSet( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet_ ) VULKAN_HPP_NOEXCEPT
{
indirectExecutionSet = indirectExecutionSet_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoEXT & setIndirectCommandsLayout( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT
{
indirectCommandsLayout = indirectCommandsLayout_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoEXT & setMaxSequenceCount( uint32_t maxSequenceCount_ ) VULKAN_HPP_NOEXCEPT
{
maxSequenceCount = maxSequenceCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoEXT & setMaxDrawCount( uint32_t maxDrawCount_ ) VULKAN_HPP_NOEXCEPT
{
maxDrawCount = maxDrawCount_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkGeneratedCommandsMemoryRequirementsInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoEXT*>( this );
}
operator VkGeneratedCommandsMemoryRequirementsInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkGeneratedCommandsMemoryRequirementsInfoEXT*>( this );
}
operator VkGeneratedCommandsMemoryRequirementsInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoEXT*>( this );
}
operator VkGeneratedCommandsMemoryRequirementsInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkGeneratedCommandsMemoryRequirementsInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT const &, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, indirectExecutionSet, indirectCommandsLayout, maxSequenceCount, maxDrawCount );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( GeneratedCommandsMemoryRequirementsInfoEXT const & ) const = default;
#else
bool operator==( GeneratedCommandsMemoryRequirementsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( indirectExecutionSet == rhs.indirectExecutionSet )
&& ( indirectCommandsLayout == rhs.indirectCommandsLayout )
&& ( maxSequenceCount == rhs.maxSequenceCount )
&& ( maxDrawCount == rhs.maxDrawCount );
#endif
}
bool operator!=( GeneratedCommandsMemoryRequirementsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeneratedCommandsMemoryRequirementsInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet = {};
VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout = {};
uint32_t maxSequenceCount = {};
uint32_t maxDrawCount = {};
};
template <>
struct CppType<StructureType, StructureType::eGeneratedCommandsMemoryRequirementsInfoEXT>
{
using Type = GeneratedCommandsMemoryRequirementsInfoEXT;
};
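  // Usage sketch (editorial, hedged): used to size the preprocess buffer before execution;
  // the handles and counts below are placeholders:
  //
  //   vk::GeneratedCommandsMemoryRequirementsInfoEXT reqInfo{
  //     indirectExecutionSet, indirectCommandsLayout, maxSequenceCount, maxDrawCount };
  //   vk::MemoryRequirements2 reqs = device.getGeneratedCommandsMemoryRequirementsEXT( reqInfo );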
// wrapper struct for struct VkGeneratedCommandsMemoryRequirementsInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkGeneratedCommandsMemoryRequirementsInfoNV.html
struct GeneratedCommandsMemoryRequirementsInfoNV
{
using NativeType = VkGeneratedCommandsMemoryRequirementsInfoNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeneratedCommandsMemoryRequirementsInfoNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR GeneratedCommandsMemoryRequirementsInfoNV(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ = {}, uint32_t maxSequencesCount_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pipelineBindPoint{ pipelineBindPoint_ }, pipeline{ pipeline_ }, indirectCommandsLayout{ indirectCommandsLayout_ }, maxSequencesCount{ maxSequencesCount_ }
{}
VULKAN_HPP_CONSTEXPR GeneratedCommandsMemoryRequirementsInfoNV( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
GeneratedCommandsMemoryRequirementsInfoNV( VkGeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: GeneratedCommandsMemoryRequirementsInfoNV( *reinterpret_cast<GeneratedCommandsMemoryRequirementsInfoNV const *>( &rhs ) )
{}
GeneratedCommandsMemoryRequirementsInfoNV & operator=( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
GeneratedCommandsMemoryRequirementsInfoNV & operator=( VkGeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
{
pipelineBindPoint = pipelineBindPoint_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
{
pipeline = pipeline_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV & setIndirectCommandsLayout( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT
{
indirectCommandsLayout = indirectCommandsLayout_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV & setMaxSequencesCount( uint32_t maxSequencesCount_ ) VULKAN_HPP_NOEXCEPT
{
maxSequencesCount = maxSequencesCount_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkGeneratedCommandsMemoryRequirementsInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV*>( this );
}
operator VkGeneratedCommandsMemoryRequirementsInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkGeneratedCommandsMemoryRequirementsInfoNV*>( this );
}
operator VkGeneratedCommandsMemoryRequirementsInfoNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV*>( this );
}
operator VkGeneratedCommandsMemoryRequirementsInfoNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkGeneratedCommandsMemoryRequirementsInfoNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineBindPoint const &, VULKAN_HPP_NAMESPACE::Pipeline const &, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pipelineBindPoint, pipeline, indirectCommandsLayout, maxSequencesCount );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( GeneratedCommandsMemoryRequirementsInfoNV const & ) const = default;
#else
bool operator==( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pipelineBindPoint == rhs.pipelineBindPoint )
&& ( pipeline == rhs.pipeline )
&& ( indirectCommandsLayout == rhs.indirectCommandsLayout )
&& ( maxSequencesCount == rhs.maxSequencesCount );
#endif
}
bool operator!=( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeneratedCommandsMemoryRequirementsInfoNV;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
VULKAN_HPP_NAMESPACE::Pipeline pipeline = {};
VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout = {};
uint32_t maxSequencesCount = {};
};
template <>
struct CppType<StructureType, StructureType::eGeneratedCommandsMemoryRequirementsInfoNV>
{
using Type = GeneratedCommandsMemoryRequirementsInfoNV;
};
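  // Usage sketch (editorial, hedged): the NV analogue of the EXT query above; note the NV
  // field is maxSequencesCount (plural "sequences"), unlike the EXT maxSequenceCount:
  //
  //   vk::GeneratedCommandsMemoryRequirementsInfoNV reqInfoNV{
  //     vk::PipelineBindPoint::eGraphics, pipeline, indirectCommandsLayout, maxSequencesCount };
  //   vk::MemoryRequirements2 reqsNV = device.getGeneratedCommandsMemoryRequirementsNV( reqInfoNV );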
// wrapper struct for struct VkGeneratedCommandsPipelineInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkGeneratedCommandsPipelineInfoEXT.html
struct GeneratedCommandsPipelineInfoEXT
{
using NativeType = VkGeneratedCommandsPipelineInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeneratedCommandsPipelineInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR GeneratedCommandsPipelineInfoEXT(VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pipeline{ pipeline_ }
{}
VULKAN_HPP_CONSTEXPR GeneratedCommandsPipelineInfoEXT( GeneratedCommandsPipelineInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
GeneratedCommandsPipelineInfoEXT( VkGeneratedCommandsPipelineInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: GeneratedCommandsPipelineInfoEXT( *reinterpret_cast<GeneratedCommandsPipelineInfoEXT const *>( &rhs ) )
{}
GeneratedCommandsPipelineInfoEXT & operator=( GeneratedCommandsPipelineInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
GeneratedCommandsPipelineInfoEXT & operator=( VkGeneratedCommandsPipelineInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeneratedCommandsPipelineInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsPipelineInfoEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsPipelineInfoEXT & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
{
pipeline = pipeline_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkGeneratedCommandsPipelineInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkGeneratedCommandsPipelineInfoEXT*>( this );
}
operator VkGeneratedCommandsPipelineInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkGeneratedCommandsPipelineInfoEXT*>( this );
}
operator VkGeneratedCommandsPipelineInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkGeneratedCommandsPipelineInfoEXT*>( this );
}
operator VkGeneratedCommandsPipelineInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkGeneratedCommandsPipelineInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Pipeline const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pipeline );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( GeneratedCommandsPipelineInfoEXT const & ) const = default;
#else
bool operator==( GeneratedCommandsPipelineInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pipeline == rhs.pipeline );
#endif
}
bool operator!=( GeneratedCommandsPipelineInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeneratedCommandsPipelineInfoEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Pipeline pipeline = {};
};
template <>
struct CppType<StructureType, StructureType::eGeneratedCommandsPipelineInfoEXT>
{
using Type = GeneratedCommandsPipelineInfoEXT;
};
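// Illustrative sketch: GeneratedCommandsPipelineInfoEXT carries only a pipeline handle and is
// typically chained through pNext when using VK_EXT_device_generated_commands. `pipeline` is
// an assumed, pre-created handle.
//
//   vk::GeneratedCommandsPipelineInfoEXT pipelineInfo{};
//   pipelineInfo.setPipeline( pipeline );   // assumed vk::Pipeline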
// wrapper struct for struct VkGeneratedCommandsShaderInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkGeneratedCommandsShaderInfoEXT.html
struct GeneratedCommandsShaderInfoEXT
{
using NativeType = VkGeneratedCommandsShaderInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeneratedCommandsShaderInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR GeneratedCommandsShaderInfoEXT(uint32_t shaderCount_ = {}, const VULKAN_HPP_NAMESPACE::ShaderEXT * pShaders_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, shaderCount{ shaderCount_ }, pShaders{ pShaders_ }
{}
VULKAN_HPP_CONSTEXPR GeneratedCommandsShaderInfoEXT( GeneratedCommandsShaderInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
GeneratedCommandsShaderInfoEXT( VkGeneratedCommandsShaderInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: GeneratedCommandsShaderInfoEXT( *reinterpret_cast<GeneratedCommandsShaderInfoEXT const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
GeneratedCommandsShaderInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ShaderEXT> const & shaders_, void * pNext_ = nullptr )
: pNext( pNext_ ), shaderCount( static_cast<uint32_t>( shaders_.size() ) ), pShaders( shaders_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
GeneratedCommandsShaderInfoEXT & operator=( GeneratedCommandsShaderInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
GeneratedCommandsShaderInfoEXT & operator=( VkGeneratedCommandsShaderInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeneratedCommandsShaderInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsShaderInfoEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsShaderInfoEXT & setShaderCount( uint32_t shaderCount_ ) VULKAN_HPP_NOEXCEPT
{
shaderCount = shaderCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsShaderInfoEXT & setPShaders( const VULKAN_HPP_NAMESPACE::ShaderEXT * pShaders_ ) VULKAN_HPP_NOEXCEPT
{
pShaders = pShaders_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
GeneratedCommandsShaderInfoEXT & setShaders( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ShaderEXT> const & shaders_ ) VULKAN_HPP_NOEXCEPT
{
shaderCount = static_cast<uint32_t>( shaders_.size() );
pShaders = shaders_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkGeneratedCommandsShaderInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkGeneratedCommandsShaderInfoEXT*>( this );
}
operator VkGeneratedCommandsShaderInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkGeneratedCommandsShaderInfoEXT*>( this );
}
operator VkGeneratedCommandsShaderInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkGeneratedCommandsShaderInfoEXT*>( this );
}
operator VkGeneratedCommandsShaderInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkGeneratedCommandsShaderInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::ShaderEXT * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, shaderCount, pShaders );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( GeneratedCommandsShaderInfoEXT const & ) const = default;
#else
bool operator==( GeneratedCommandsShaderInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( shaderCount == rhs.shaderCount )
&& ( pShaders == rhs.pShaders );
#endif
}
bool operator!=( GeneratedCommandsShaderInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeneratedCommandsShaderInfoEXT;
void * pNext = {};
uint32_t shaderCount = {};
const VULKAN_HPP_NAMESPACE::ShaderEXT * pShaders = {};
};
template <>
struct CppType<StructureType, StructureType::eGeneratedCommandsShaderInfoEXT>
{
using Type = GeneratedCommandsShaderInfoEXT;
};
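// Illustrative sketch: in enhanced mode the setShaders overload takes an
// ArrayProxyNoTemporaries, so shaderCount and pShaders stay consistent automatically.
// `vertexShader` and `fragmentShader` are assumed, pre-created vk::ShaderEXT handles.
//
//   std::array<vk::ShaderEXT, 2> shaders = { vertexShader, fragmentShader };  // assumed handles
//   vk::GeneratedCommandsShaderInfoEXT shaderInfo{};
//   shaderInfo.setShaders( shaders );   // sets shaderCount = 2 and pShaders = shaders.data()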
// wrapper struct for struct VkLatencyTimingsFrameReportNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkLatencyTimingsFrameReportNV.html
struct LatencyTimingsFrameReportNV
{
using NativeType = VkLatencyTimingsFrameReportNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eLatencyTimingsFrameReportNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR LatencyTimingsFrameReportNV(uint64_t presentID_ = {}, uint64_t inputSampleTimeUs_ = {}, uint64_t simStartTimeUs_ = {}, uint64_t simEndTimeUs_ = {}, uint64_t renderSubmitStartTimeUs_ = {}, uint64_t renderSubmitEndTimeUs_ = {}, uint64_t presentStartTimeUs_ = {}, uint64_t presentEndTimeUs_ = {}, uint64_t driverStartTimeUs_ = {}, uint64_t driverEndTimeUs_ = {}, uint64_t osRenderQueueStartTimeUs_ = {}, uint64_t osRenderQueueEndTimeUs_ = {}, uint64_t gpuRenderStartTimeUs_ = {}, uint64_t gpuRenderEndTimeUs_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, presentID{ presentID_ }, inputSampleTimeUs{ inputSampleTimeUs_ }, simStartTimeUs{ simStartTimeUs_ }, simEndTimeUs{ simEndTimeUs_ }, renderSubmitStartTimeUs{ renderSubmitStartTimeUs_ }, renderSubmitEndTimeUs{ renderSubmitEndTimeUs_ }, presentStartTimeUs{ presentStartTimeUs_ }, presentEndTimeUs{ presentEndTimeUs_ }, driverStartTimeUs{ driverStartTimeUs_ }, driverEndTimeUs{ driverEndTimeUs_ }, osRenderQueueStartTimeUs{ osRenderQueueStartTimeUs_ }, osRenderQueueEndTimeUs{ osRenderQueueEndTimeUs_ }, gpuRenderStartTimeUs{ gpuRenderStartTimeUs_ }, gpuRenderEndTimeUs{ gpuRenderEndTimeUs_ }
{}
VULKAN_HPP_CONSTEXPR LatencyTimingsFrameReportNV( LatencyTimingsFrameReportNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
LatencyTimingsFrameReportNV( VkLatencyTimingsFrameReportNV const & rhs ) VULKAN_HPP_NOEXCEPT
: LatencyTimingsFrameReportNV( *reinterpret_cast<LatencyTimingsFrameReportNV const *>( &rhs ) )
{}
LatencyTimingsFrameReportNV & operator=( LatencyTimingsFrameReportNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
LatencyTimingsFrameReportNV & operator=( VkLatencyTimingsFrameReportNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::LatencyTimingsFrameReportNV const *>( &rhs );
return *this;
}
operator VkLatencyTimingsFrameReportNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkLatencyTimingsFrameReportNV*>( this );
}
operator VkLatencyTimingsFrameReportNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkLatencyTimingsFrameReportNV*>( this );
}
operator VkLatencyTimingsFrameReportNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkLatencyTimingsFrameReportNV*>( this );
}
operator VkLatencyTimingsFrameReportNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkLatencyTimingsFrameReportNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint64_t const &, uint64_t const &, uint64_t const &, uint64_t const &, uint64_t const &, uint64_t const &, uint64_t const &, uint64_t const &, uint64_t const &, uint64_t const &, uint64_t const &, uint64_t const &, uint64_t const &, uint64_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, presentID, inputSampleTimeUs, simStartTimeUs, simEndTimeUs, renderSubmitStartTimeUs, renderSubmitEndTimeUs, presentStartTimeUs, presentEndTimeUs, driverStartTimeUs, driverEndTimeUs, osRenderQueueStartTimeUs, osRenderQueueEndTimeUs, gpuRenderStartTimeUs, gpuRenderEndTimeUs );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( LatencyTimingsFrameReportNV const & ) const = default;
#else
bool operator==( LatencyTimingsFrameReportNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( presentID == rhs.presentID )
&& ( inputSampleTimeUs == rhs.inputSampleTimeUs )
&& ( simStartTimeUs == rhs.simStartTimeUs )
&& ( simEndTimeUs == rhs.simEndTimeUs )
&& ( renderSubmitStartTimeUs == rhs.renderSubmitStartTimeUs )
&& ( renderSubmitEndTimeUs == rhs.renderSubmitEndTimeUs )
&& ( presentStartTimeUs == rhs.presentStartTimeUs )
&& ( presentEndTimeUs == rhs.presentEndTimeUs )
&& ( driverStartTimeUs == rhs.driverStartTimeUs )
&& ( driverEndTimeUs == rhs.driverEndTimeUs )
&& ( osRenderQueueStartTimeUs == rhs.osRenderQueueStartTimeUs )
&& ( osRenderQueueEndTimeUs == rhs.osRenderQueueEndTimeUs )
&& ( gpuRenderStartTimeUs == rhs.gpuRenderStartTimeUs )
&& ( gpuRenderEndTimeUs == rhs.gpuRenderEndTimeUs );
#endif
}
bool operator!=( LatencyTimingsFrameReportNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eLatencyTimingsFrameReportNV;
const void * pNext = {};
uint64_t presentID = {};
uint64_t inputSampleTimeUs = {};
uint64_t simStartTimeUs = {};
uint64_t simEndTimeUs = {};
uint64_t renderSubmitStartTimeUs = {};
uint64_t renderSubmitEndTimeUs = {};
uint64_t presentStartTimeUs = {};
uint64_t presentEndTimeUs = {};
uint64_t driverStartTimeUs = {};
uint64_t driverEndTimeUs = {};
uint64_t osRenderQueueStartTimeUs = {};
uint64_t osRenderQueueEndTimeUs = {};
uint64_t gpuRenderStartTimeUs = {};
uint64_t gpuRenderEndTimeUs = {};
};
template <>
struct CppType<StructureType, StructureType::eLatencyTimingsFrameReportNV>
{
using Type = LatencyTimingsFrameReportNV;
};
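// Illustrative sketch: LatencyTimingsFrameReportNV is a read-only report (note it has no
// setters section), so an application typically just derives per-stage durations from the
// microsecond timestamps. `report` is an assumed, already-filled instance.
//
//   uint64_t gpuRenderUs = report.gpuRenderEndTimeUs - report.gpuRenderStartTimeUs;
//   uint64_t simUs       = report.simEndTimeUs - report.simStartTimeUs;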
// wrapper struct for struct VkGetLatencyMarkerInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkGetLatencyMarkerInfoNV.html
struct GetLatencyMarkerInfoNV
{
using NativeType = VkGetLatencyMarkerInfoNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGetLatencyMarkerInfoNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR GetLatencyMarkerInfoNV(uint32_t timingCount_ = {}, VULKAN_HPP_NAMESPACE::LatencyTimingsFrameReportNV * pTimings_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, timingCount{ timingCount_ }, pTimings{ pTimings_ }
{}
VULKAN_HPP_CONSTEXPR GetLatencyMarkerInfoNV( GetLatencyMarkerInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
GetLatencyMarkerInfoNV( VkGetLatencyMarkerInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: GetLatencyMarkerInfoNV( *reinterpret_cast<GetLatencyMarkerInfoNV const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
GetLatencyMarkerInfoNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::LatencyTimingsFrameReportNV> const & timings_, const void * pNext_ = nullptr )
: pNext( pNext_ ), timingCount( static_cast<uint32_t>( timings_.size() ) ), pTimings( timings_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
GetLatencyMarkerInfoNV & operator=( GetLatencyMarkerInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
GetLatencyMarkerInfoNV & operator=( VkGetLatencyMarkerInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 GetLatencyMarkerInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GetLatencyMarkerInfoNV & setTimingCount( uint32_t timingCount_ ) VULKAN_HPP_NOEXCEPT
{
timingCount = timingCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GetLatencyMarkerInfoNV & setPTimings( VULKAN_HPP_NAMESPACE::LatencyTimingsFrameReportNV * pTimings_ ) VULKAN_HPP_NOEXCEPT
{
pTimings = pTimings_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
GetLatencyMarkerInfoNV & setTimings( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::LatencyTimingsFrameReportNV> const & timings_ ) VULKAN_HPP_NOEXCEPT
{
timingCount = static_cast<uint32_t>( timings_.size() );
pTimings = timings_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkGetLatencyMarkerInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkGetLatencyMarkerInfoNV*>( this );
}
operator VkGetLatencyMarkerInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkGetLatencyMarkerInfoNV*>( this );
}
operator VkGetLatencyMarkerInfoNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkGetLatencyMarkerInfoNV*>( this );
}
operator VkGetLatencyMarkerInfoNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkGetLatencyMarkerInfoNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::LatencyTimingsFrameReportNV * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, timingCount, pTimings );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( GetLatencyMarkerInfoNV const & ) const = default;
#else
bool operator==( GetLatencyMarkerInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( timingCount == rhs.timingCount )
&& ( pTimings == rhs.pTimings );
#endif
}
bool operator!=( GetLatencyMarkerInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGetLatencyMarkerInfoNV;
const void * pNext = {};
uint32_t timingCount = {};
VULKAN_HPP_NAMESPACE::LatencyTimingsFrameReportNV * pTimings = {};
};
template <>
struct CppType<StructureType, StructureType::eGetLatencyMarkerInfoNV>
{
using Type = GetLatencyMarkerInfoNV;
};
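// Illustrative sketch: the enhanced-mode setTimings keeps timingCount and pTimings in sync
// when handing a caller-owned buffer to the VK_NV_low_latency2 timing query. The capacity of
// 64 below is an assumption; a real application would size it from a prior count query.
//
//   std::vector<vk::LatencyTimingsFrameReportNV> timings( 64 );   // assumed capacity
//   vk::GetLatencyMarkerInfoNV markerInfo{};
//   markerInfo.setTimings( timings );   // timingCount = 64, pTimings = timings.data()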
// wrapper struct for struct VkVertexInputBindingDescription, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVertexInputBindingDescription.html
struct VertexInputBindingDescription
{
using NativeType = VkVertexInputBindingDescription;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VertexInputBindingDescription(uint32_t binding_ = {}, uint32_t stride_ = {}, VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex) VULKAN_HPP_NOEXCEPT
: binding{ binding_ }, stride{ stride_ }, inputRate{ inputRate_ }
{}
VULKAN_HPP_CONSTEXPR VertexInputBindingDescription( VertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VertexInputBindingDescription( VkVertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT
: VertexInputBindingDescription( *reinterpret_cast<VertexInputBindingDescription const *>( &rhs ) )
{}
VertexInputBindingDescription & operator=( VertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VertexInputBindingDescription & operator=( VkVertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VertexInputBindingDescription const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
{
binding = binding_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT
{
stride = stride_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription & setInputRate( VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ ) VULKAN_HPP_NOEXCEPT
{
inputRate = inputRate_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVertexInputBindingDescription const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVertexInputBindingDescription*>( this );
}
operator VkVertexInputBindingDescription &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVertexInputBindingDescription*>( this );
}
operator VkVertexInputBindingDescription const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVertexInputBindingDescription*>( this );
}
operator VkVertexInputBindingDescription *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVertexInputBindingDescription*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::VertexInputRate const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( binding, stride, inputRate );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VertexInputBindingDescription const & ) const = default;
#else
bool operator==( VertexInputBindingDescription const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( binding == rhs.binding )
&& ( stride == rhs.stride )
&& ( inputRate == rhs.inputRate );
#endif
}
bool operator!=( VertexInputBindingDescription const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
uint32_t binding = {};
uint32_t stride = {};
VULKAN_HPP_NAMESPACE::VertexInputRate inputRate = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex;
};
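// Illustrative sketch: one binding that sources a tightly packed, per-vertex `Vertex` struct.
// `Vertex` is an assumed application-side type, not part of this header.
//
//   vk::VertexInputBindingDescription binding{};
//   binding.setBinding( 0 )
//          .setStride( sizeof( Vertex ) )                   // assumed vertex layout
//          .setInputRate( vk::VertexInputRate::eVertex );   // advance once per vertex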
// wrapper struct for struct VkVertexInputAttributeDescription, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVertexInputAttributeDescription.html
struct VertexInputAttributeDescription
{
using NativeType = VkVertexInputAttributeDescription;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription(uint32_t location_ = {}, uint32_t binding_ = {}, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, uint32_t offset_ = {}) VULKAN_HPP_NOEXCEPT
: location{ location_ }, binding{ binding_ }, format{ format_ }, offset{ offset_ }
{}
VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription( VertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VertexInputAttributeDescription( VkVertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT
: VertexInputAttributeDescription( *reinterpret_cast<VertexInputAttributeDescription const *>( &rhs ) )
{}
VertexInputAttributeDescription & operator=( VertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VertexInputAttributeDescription & operator=( VkVertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription & setLocation( uint32_t location_ ) VULKAN_HPP_NOEXCEPT
{
location = location_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
{
binding = binding_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
{
format = format_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT
{
offset = offset_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVertexInputAttributeDescription const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVertexInputAttributeDescription*>( this );
}
operator VkVertexInputAttributeDescription &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVertexInputAttributeDescription*>( this );
}
operator VkVertexInputAttributeDescription const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVertexInputAttributeDescription*>( this );
}
operator VkVertexInputAttributeDescription *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVertexInputAttributeDescription*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Format const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( location, binding, format, offset );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VertexInputAttributeDescription const & ) const = default;
#else
bool operator==( VertexInputAttributeDescription const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( location == rhs.location )
&& ( binding == rhs.binding )
&& ( format == rhs.format )
&& ( offset == rhs.offset );
#endif
}
bool operator!=( VertexInputAttributeDescription const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
uint32_t location = {};
uint32_t binding = {};
VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
uint32_t offset = {};
};
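// Illustrative sketch: describing a vec3 position attribute at location 0, read from binding 0.
// The `Vertex` type and its `pos` member are assumptions for the offset computation.
//
//   vk::VertexInputAttributeDescription attribute{};
//   attribute.setLocation( 0 )
//            .setBinding( 0 )
//            .setFormat( vk::Format::eR32G32B32Sfloat )   // matches a 3 x float position
//            .setOffset( offsetof( Vertex, pos ) );       // assumed member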
// wrapper struct for struct VkPipelineVertexInputStateCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineVertexInputStateCreateInfo.html
struct PipelineVertexInputStateCreateInfo
{
using NativeType = VkPipelineVertexInputStateCreateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineVertexInputStateCreateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineVertexInputStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_ = {}, uint32_t vertexBindingDescriptionCount_ = {}, const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription * pVertexBindingDescriptions_ = {}, uint32_t vertexAttributeDescriptionCount_ = {}, const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription * pVertexAttributeDescriptions_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, vertexBindingDescriptionCount{ vertexBindingDescriptionCount_ }, pVertexBindingDescriptions{ pVertexBindingDescriptions_ }, vertexAttributeDescriptionCount{ vertexAttributeDescriptionCount_ }, pVertexAttributeDescriptions{ pVertexAttributeDescriptions_ }
{}
VULKAN_HPP_CONSTEXPR PipelineVertexInputStateCreateInfo( PipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineVertexInputStateCreateInfo( VkPipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineVertexInputStateCreateInfo( *reinterpret_cast<PipelineVertexInputStateCreateInfo const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineVertexInputStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription> const & vertexBindingDescriptions_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription> const & vertexAttributeDescriptions_ = {}, const void * pNext_ = nullptr )
: pNext( pNext_ ), flags( flags_ ), vertexBindingDescriptionCount( static_cast<uint32_t>( vertexBindingDescriptions_.size() ) ), pVertexBindingDescriptions( vertexBindingDescriptions_.data() ), vertexAttributeDescriptionCount( static_cast<uint32_t>( vertexAttributeDescriptions_.size() ) ), pVertexAttributeDescriptions( vertexAttributeDescriptions_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
PipelineVertexInputStateCreateInfo & operator=( PipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineVertexInputStateCreateInfo & operator=( VkPipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & setVertexBindingDescriptionCount( uint32_t vertexBindingDescriptionCount_ ) VULKAN_HPP_NOEXCEPT
{
vertexBindingDescriptionCount = vertexBindingDescriptionCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & setPVertexBindingDescriptions( const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription * pVertexBindingDescriptions_ ) VULKAN_HPP_NOEXCEPT
{
pVertexBindingDescriptions = pVertexBindingDescriptions_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineVertexInputStateCreateInfo & setVertexBindingDescriptions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription> const & vertexBindingDescriptions_ ) VULKAN_HPP_NOEXCEPT
{
vertexBindingDescriptionCount = static_cast<uint32_t>( vertexBindingDescriptions_.size() );
pVertexBindingDescriptions = vertexBindingDescriptions_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & setVertexAttributeDescriptionCount( uint32_t vertexAttributeDescriptionCount_ ) VULKAN_HPP_NOEXCEPT
{
vertexAttributeDescriptionCount = vertexAttributeDescriptionCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & setPVertexAttributeDescriptions( const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription * pVertexAttributeDescriptions_ ) VULKAN_HPP_NOEXCEPT
{
pVertexAttributeDescriptions = pVertexAttributeDescriptions_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineVertexInputStateCreateInfo & setVertexAttributeDescriptions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription> const & vertexAttributeDescriptions_ ) VULKAN_HPP_NOEXCEPT
{
vertexAttributeDescriptionCount = static_cast<uint32_t>( vertexAttributeDescriptions_.size() );
pVertexAttributeDescriptions = vertexAttributeDescriptions_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPipelineVertexInputStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineVertexInputStateCreateInfo*>( this );
}
operator VkPipelineVertexInputStateCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineVertexInputStateCreateInfo*>( this );
}
operator VkPipelineVertexInputStateCreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineVertexInputStateCreateInfo*>( this );
}
operator VkPipelineVertexInputStateCreateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineVertexInputStateCreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, vertexBindingDescriptionCount, pVertexBindingDescriptions, vertexAttributeDescriptionCount, pVertexAttributeDescriptions );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PipelineVertexInputStateCreateInfo const & ) const = default;
#else
bool operator==( PipelineVertexInputStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( vertexBindingDescriptionCount == rhs.vertexBindingDescriptionCount )
&& ( pVertexBindingDescriptions == rhs.pVertexBindingDescriptions )
&& ( vertexAttributeDescriptionCount == rhs.vertexAttributeDescriptionCount )
&& ( pVertexAttributeDescriptions == rhs.pVertexAttributeDescriptions );
#endif
}
bool operator!=( PipelineVertexInputStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineVertexInputStateCreateInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags = {};
uint32_t vertexBindingDescriptionCount = {};
const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription * pVertexBindingDescriptions = {};
uint32_t vertexAttributeDescriptionCount = {};
const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription * pVertexAttributeDescriptions = {};
};
template <>
struct CppType<StructureType, StructureType::ePipelineVertexInputStateCreateInfo>
{
using Type = PipelineVertexInputStateCreateInfo;
};
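// Illustrative sketch: the ArrayProxyNoTemporaries setters fill in both the count and the
// pointer, so the descriptions only need to outlive pipeline creation. `binding` and
// `attribute` are assumed to be the descriptions built as in the sketches above.
//
//   vk::PipelineVertexInputStateCreateInfo vertexInput{};
//   vertexInput.setVertexBindingDescriptions( binding )      // single-element proxy
//              .setVertexAttributeDescriptions( attribute );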
// wrapper struct for struct VkPipelineInputAssemblyStateCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineInputAssemblyStateCreateInfo.html
struct PipelineInputAssemblyStateCreateInfo
{
using NativeType = VkPipelineInputAssemblyStateCreateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineInputAssemblyStateCreateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineInputAssemblyStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::PrimitiveTopology topology_ = VULKAN_HPP_NAMESPACE::PrimitiveTopology::ePointList, VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, topology{ topology_ }, primitiveRestartEnable{ primitiveRestartEnable_ }
{}
VULKAN_HPP_CONSTEXPR PipelineInputAssemblyStateCreateInfo( PipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineInputAssemblyStateCreateInfo( VkPipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineInputAssemblyStateCreateInfo( *reinterpret_cast<PipelineInputAssemblyStateCreateInfo const *>( &rhs ) )
{}
PipelineInputAssemblyStateCreateInfo & operator=( PipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineInputAssemblyStateCreateInfo & operator=( VkPipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PipelineInputAssemblyStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineInputAssemblyStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineInputAssemblyStateCreateInfo & setTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology topology_ ) VULKAN_HPP_NOEXCEPT
{
topology = topology_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineInputAssemblyStateCreateInfo & setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable_ ) VULKAN_HPP_NOEXCEPT
{
primitiveRestartEnable = primitiveRestartEnable_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPipelineInputAssemblyStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineInputAssemblyStateCreateInfo*>( this );
}
operator VkPipelineInputAssemblyStateCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineInputAssemblyStateCreateInfo*>( this );
}
operator VkPipelineInputAssemblyStateCreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineInputAssemblyStateCreateInfo*>( this );
}
operator VkPipelineInputAssemblyStateCreateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineInputAssemblyStateCreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags const &, VULKAN_HPP_NAMESPACE::PrimitiveTopology const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, topology, primitiveRestartEnable );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PipelineInputAssemblyStateCreateInfo const & ) const = default;
#else
bool operator==( PipelineInputAssemblyStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( topology == rhs.topology )
&& ( primitiveRestartEnable == rhs.primitiveRestartEnable );
#endif
}
bool operator!=( PipelineInputAssemblyStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineInputAssemblyStateCreateInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags = {};
VULKAN_HPP_NAMESPACE::PrimitiveTopology topology = VULKAN_HPP_NAMESPACE::PrimitiveTopology::ePointList;
VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable = {};
};
template <>
struct CppType<StructureType, StructureType::ePipelineInputAssemblyStateCreateInfo>
{
using Type = PipelineInputAssemblyStateCreateInfo;
};
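// Illustrative sketch: a typical triangle-list configuration with primitive restart disabled,
// the common case for indexed meshes that do not use strip topologies.
//
//   vk::PipelineInputAssemblyStateCreateInfo inputAssembly{};
//   inputAssembly.setTopology( vk::PrimitiveTopology::eTriangleList )
//                .setPrimitiveRestartEnable( VK_FALSE );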
// wrapper struct for struct VkPipelineTessellationStateCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineTessellationStateCreateInfo.html
struct PipelineTessellationStateCreateInfo
{
using NativeType = VkPipelineTessellationStateCreateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineTessellationStateCreateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineTessellationStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags_ = {}, uint32_t patchControlPoints_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, patchControlPoints{ patchControlPoints_ }
{}
VULKAN_HPP_CONSTEXPR PipelineTessellationStateCreateInfo( PipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineTessellationStateCreateInfo( VkPipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineTessellationStateCreateInfo( *reinterpret_cast<PipelineTessellationStateCreateInfo const *>( &rhs ) )
{}
PipelineTessellationStateCreateInfo & operator=( PipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineTessellationStateCreateInfo & operator=( VkPipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PipelineTessellationStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineTessellationStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineTessellationStateCreateInfo & setPatchControlPoints( uint32_t patchControlPoints_ ) VULKAN_HPP_NOEXCEPT
{
patchControlPoints = patchControlPoints_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPipelineTessellationStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineTessellationStateCreateInfo*>( this );
}
operator VkPipelineTessellationStateCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineTessellationStateCreateInfo*>( this );
}
operator VkPipelineTessellationStateCreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineTessellationStateCreateInfo*>( this );
}
operator VkPipelineTessellationStateCreateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineTessellationStateCreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, patchControlPoints );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PipelineTessellationStateCreateInfo const & ) const = default;
#else
bool operator==( PipelineTessellationStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( patchControlPoints == rhs.patchControlPoints );
#endif
}
bool operator!=( PipelineTessellationStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineTessellationStateCreateInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags = {};
uint32_t patchControlPoints = {};
};
template <>
struct CppType<StructureType, StructureType::ePipelineTessellationStateCreateInfo>
{
using Type = PipelineTessellationStateCreateInfo;
};
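// Illustrative sketch: tessellation state only applies when the pipeline uses tessellation
// shaders together with ePatchList topology. Three control points per patch (triangle
// patches) is a common choice, but the right value depends on the control shader.
//
//   vk::PipelineTessellationStateCreateInfo tessellation{};
//   tessellation.setPatchControlPoints( 3 );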
// wrapper struct for struct VkPipelineViewportStateCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineViewportStateCreateInfo.html
struct PipelineViewportStateCreateInfo
{
using NativeType = VkPipelineViewportStateCreateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportStateCreateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineViewportStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_ = {}, uint32_t viewportCount_ = {}, const VULKAN_HPP_NAMESPACE::Viewport * pViewports_ = {}, uint32_t scissorCount_ = {}, const VULKAN_HPP_NAMESPACE::Rect2D * pScissors_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, viewportCount{ viewportCount_ }, pViewports{ pViewports_ }, scissorCount{ scissorCount_ }, pScissors{ pScissors_ }
{}
VULKAN_HPP_CONSTEXPR PipelineViewportStateCreateInfo( PipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineViewportStateCreateInfo( VkPipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineViewportStateCreateInfo( *reinterpret_cast<PipelineViewportStateCreateInfo const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineViewportStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors_ = {}, const void * pNext_ = nullptr )
: pNext( pNext_ ), flags( flags_ ), viewportCount( static_cast<uint32_t>( viewports_.size() ) ), pViewports( viewports_.data() ), scissorCount( static_cast<uint32_t>( scissors_.size() ) ), pScissors( scissors_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
PipelineViewportStateCreateInfo & operator=( PipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineViewportStateCreateInfo & operator=( VkPipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT
{
viewportCount = viewportCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setPViewports( const VULKAN_HPP_NAMESPACE::Viewport * pViewports_ ) VULKAN_HPP_NOEXCEPT
{
pViewports = pViewports_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineViewportStateCreateInfo & setViewports( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports_ ) VULKAN_HPP_NOEXCEPT
{
viewportCount = static_cast<uint32_t>( viewports_.size() );
pViewports = viewports_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setScissorCount( uint32_t scissorCount_ ) VULKAN_HPP_NOEXCEPT
{
scissorCount = scissorCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setPScissors( const VULKAN_HPP_NAMESPACE::Rect2D * pScissors_ ) VULKAN_HPP_NOEXCEPT
{
pScissors = pScissors_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineViewportStateCreateInfo & setScissors( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors_ ) VULKAN_HPP_NOEXCEPT
{
scissorCount = static_cast<uint32_t>( scissors_.size() );
pScissors = scissors_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPipelineViewportStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineViewportStateCreateInfo*>( this );
}
operator VkPipelineViewportStateCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineViewportStateCreateInfo*>( this );
}
operator VkPipelineViewportStateCreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineViewportStateCreateInfo*>( this );
}
operator VkPipelineViewportStateCreateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineViewportStateCreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Viewport * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Rect2D * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, viewportCount, pViewports, scissorCount, pScissors );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PipelineViewportStateCreateInfo const & ) const = default;
#else
bool operator==( PipelineViewportStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( viewportCount == rhs.viewportCount )
&& ( pViewports == rhs.pViewports )
&& ( scissorCount == rhs.scissorCount )
&& ( pScissors == rhs.pScissors );
#endif
}
bool operator!=( PipelineViewportStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportStateCreateInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags = {};
uint32_t viewportCount = {};
const VULKAN_HPP_NAMESPACE::Viewport * pViewports = {};
uint32_t scissorCount = {};
const VULKAN_HPP_NAMESPACE::Rect2D * pScissors = {};
};
template <>
struct CppType<StructureType, StructureType::ePipelineViewportStateCreateInfo>
{
using Type = PipelineViewportStateCreateInfo;
};
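// Illustrative sketch: one viewport with one matching scissor. `viewport` and `scissor` are
// assumed vk::Viewport / vk::Rect2D values; with dynamic viewport/scissor state the pointers
// may instead be left null while the counts are still set.
//
//   vk::PipelineViewportStateCreateInfo viewportState{};
//   viewportState.setViewports( viewport )   // viewportCount = 1
//                .setScissors( scissor );    // scissorCount = 1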
// wrapper struct for struct VkPipelineRasterizationStateCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineRasterizationStateCreateInfo.html
struct PipelineRasterizationStateCreateInfo
{
using NativeType = VkPipelineRasterizationStateCreateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationStateCreateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineRasterizationStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable_ = {}, VULKAN_HPP_NAMESPACE::PolygonMode polygonMode_ = VULKAN_HPP_NAMESPACE::PolygonMode::eFill, VULKAN_HPP_NAMESPACE::CullModeFlags cullMode_ = {}, VULKAN_HPP_NAMESPACE::FrontFace frontFace_ = VULKAN_HPP_NAMESPACE::FrontFace::eCounterClockwise, VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable_ = {}, float depthBiasConstantFactor_ = {}, float depthBiasClamp_ = {}, float depthBiasSlopeFactor_ = {}, float lineWidth_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, depthClampEnable{ depthClampEnable_ }, rasterizerDiscardEnable{ rasterizerDiscardEnable_ }, polygonMode{ polygonMode_ }, cullMode{ cullMode_ }, frontFace{ frontFace_ }, depthBiasEnable{ depthBiasEnable_ }, depthBiasConstantFactor{ depthBiasConstantFactor_ }, depthBiasClamp{ depthBiasClamp_ }, depthBiasSlopeFactor{ depthBiasSlopeFactor_ }, lineWidth{ lineWidth_ }
{}
VULKAN_HPP_CONSTEXPR PipelineRasterizationStateCreateInfo( PipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineRasterizationStateCreateInfo( VkPipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineRasterizationStateCreateInfo( *reinterpret_cast<PipelineRasterizationStateCreateInfo const *>( &rhs ) )
{}
PipelineRasterizationStateCreateInfo & operator=( PipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineRasterizationStateCreateInfo & operator=( VkPipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setDepthClampEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable_ ) VULKAN_HPP_NOEXCEPT
{
depthClampEnable = depthClampEnable_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable_ ) VULKAN_HPP_NOEXCEPT
{
rasterizerDiscardEnable = rasterizerDiscardEnable_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setPolygonMode( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode_ ) VULKAN_HPP_NOEXCEPT
{
polygonMode = polygonMode_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode_ ) VULKAN_HPP_NOEXCEPT
{
cullMode = cullMode_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace_ ) VULKAN_HPP_NOEXCEPT
{
frontFace = frontFace_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable_ ) VULKAN_HPP_NOEXCEPT
{
depthBiasEnable = depthBiasEnable_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setDepthBiasConstantFactor( float depthBiasConstantFactor_ ) VULKAN_HPP_NOEXCEPT
{
depthBiasConstantFactor = depthBiasConstantFactor_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setDepthBiasClamp( float depthBiasClamp_ ) VULKAN_HPP_NOEXCEPT
{
depthBiasClamp = depthBiasClamp_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setDepthBiasSlopeFactor( float depthBiasSlopeFactor_ ) VULKAN_HPP_NOEXCEPT
{
depthBiasSlopeFactor = depthBiasSlopeFactor_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setLineWidth( float lineWidth_ ) VULKAN_HPP_NOEXCEPT
{
lineWidth = lineWidth_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPipelineRasterizationStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineRasterizationStateCreateInfo*>( this );
}
operator VkPipelineRasterizationStateCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineRasterizationStateCreateInfo*>( this );
}
operator VkPipelineRasterizationStateCreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineRasterizationStateCreateInfo*>( this );
}
operator VkPipelineRasterizationStateCreateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineRasterizationStateCreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::PolygonMode const &, VULKAN_HPP_NAMESPACE::CullModeFlags const &, VULKAN_HPP_NAMESPACE::FrontFace const &, VULKAN_HPP_NAMESPACE::Bool32 const &, float const &, float const &, float const &, float const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, depthClampEnable, rasterizerDiscardEnable, polygonMode, cullMode, frontFace, depthBiasEnable, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor, lineWidth );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PipelineRasterizationStateCreateInfo const & ) const = default;
#else
bool operator==( PipelineRasterizationStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( depthClampEnable == rhs.depthClampEnable )
&& ( rasterizerDiscardEnable == rhs.rasterizerDiscardEnable )
&& ( polygonMode == rhs.polygonMode )
&& ( cullMode == rhs.cullMode )
&& ( frontFace == rhs.frontFace )
&& ( depthBiasEnable == rhs.depthBiasEnable )
&& ( depthBiasConstantFactor == rhs.depthBiasConstantFactor )
&& ( depthBiasClamp == rhs.depthBiasClamp )
&& ( depthBiasSlopeFactor == rhs.depthBiasSlopeFactor )
&& ( lineWidth == rhs.lineWidth );
#endif
}
bool operator!=( PipelineRasterizationStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateCreateInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags = {};
VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable = {};
VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable = {};
VULKAN_HPP_NAMESPACE::PolygonMode polygonMode = VULKAN_HPP_NAMESPACE::PolygonMode::eFill;
VULKAN_HPP_NAMESPACE::CullModeFlags cullMode = {};
VULKAN_HPP_NAMESPACE::FrontFace frontFace = VULKAN_HPP_NAMESPACE::FrontFace::eCounterClockwise;
VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable = {};
float depthBiasConstantFactor = {};
float depthBiasClamp = {};
float depthBiasSlopeFactor = {};
float lineWidth = {};
};
template <>
struct CppType<StructureType, StructureType::ePipelineRasterizationStateCreateInfo>
{
using Type = PipelineRasterizationStateCreateInfo;
};
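// Illustrative sketch: a conventional fill-mode configuration with back-face culling. Note
// that lineWidth defaults to 0 in this wrapper, so setting it to 1.0f (the only value allowed
// without the wideLines feature) is easy to forget.
//
//   vk::PipelineRasterizationStateCreateInfo rasterization{};
//   rasterization.setPolygonMode( vk::PolygonMode::eFill )
//                .setCullMode( vk::CullModeFlagBits::eBack )
//                .setFrontFace( vk::FrontFace::eCounterClockwise )
//                .setLineWidth( 1.0f );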
// wrapper struct for struct VkPipelineMultisampleStateCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineMultisampleStateCreateInfo.html
struct PipelineMultisampleStateCreateInfo
{
using NativeType = VkPipelineMultisampleStateCreateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineMultisampleStateCreateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineMultisampleStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable_ = {}, float minSampleShading_ = {}, const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask_ = {}, VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable_ = {}, VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, rasterizationSamples{ rasterizationSamples_ }, sampleShadingEnable{ sampleShadingEnable_ }, minSampleShading{ minSampleShading_ }, pSampleMask{ pSampleMask_ }, alphaToCoverageEnable{ alphaToCoverageEnable_ }, alphaToOneEnable{ alphaToOneEnable_ }
{}
VULKAN_HPP_CONSTEXPR PipelineMultisampleStateCreateInfo( PipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineMultisampleStateCreateInfo( VkPipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineMultisampleStateCreateInfo( *reinterpret_cast<PipelineMultisampleStateCreateInfo const *>( &rhs ) )
{}
PipelineMultisampleStateCreateInfo & operator=( PipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineMultisampleStateCreateInfo & operator=( VkPipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setRasterizationSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ ) VULKAN_HPP_NOEXCEPT
{
rasterizationSamples = rasterizationSamples_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setSampleShadingEnable( VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable_ ) VULKAN_HPP_NOEXCEPT
{
sampleShadingEnable = sampleShadingEnable_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setMinSampleShading( float minSampleShading_ ) VULKAN_HPP_NOEXCEPT
{
minSampleShading = minSampleShading_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setPSampleMask( const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask_ ) VULKAN_HPP_NOEXCEPT
{
pSampleMask = pSampleMask_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setAlphaToCoverageEnable( VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable_ ) VULKAN_HPP_NOEXCEPT
{
alphaToCoverageEnable = alphaToCoverageEnable_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setAlphaToOneEnable( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable_ ) VULKAN_HPP_NOEXCEPT
{
alphaToOneEnable = alphaToOneEnable_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPipelineMultisampleStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineMultisampleStateCreateInfo*>( this );
}
operator VkPipelineMultisampleStateCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineMultisampleStateCreateInfo*>( this );
}
operator VkPipelineMultisampleStateCreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineMultisampleStateCreateInfo*>( this );
}
operator VkPipelineMultisampleStateCreateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineMultisampleStateCreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags const &, VULKAN_HPP_NAMESPACE::SampleCountFlagBits const &, VULKAN_HPP_NAMESPACE::Bool32 const &, float const &, const VULKAN_HPP_NAMESPACE::SampleMask * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, rasterizationSamples, sampleShadingEnable, minSampleShading, pSampleMask, alphaToCoverageEnable, alphaToOneEnable );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PipelineMultisampleStateCreateInfo const & ) const = default;
#else
bool operator==( PipelineMultisampleStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( rasterizationSamples == rhs.rasterizationSamples )
&& ( sampleShadingEnable == rhs.sampleShadingEnable )
&& ( minSampleShading == rhs.minSampleShading )
&& ( pSampleMask == rhs.pSampleMask )
&& ( alphaToCoverageEnable == rhs.alphaToCoverageEnable )
&& ( alphaToOneEnable == rhs.alphaToOneEnable );
#endif
}
bool operator!=( PipelineMultisampleStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineMultisampleStateCreateInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags = {};
VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable = {};
float minSampleShading = {};
const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask = {};
VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable = {};
VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable = {};
};
template <>
struct CppType<StructureType, StructureType::ePipelineMultisampleStateCreateInfo>
{
using Type = PipelineMultisampleStateCreateInfo;
};
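  // Usage sketch (illustrative): single-sample and 4x MSAA variants, assuming the default
  // vk namespace and enabled setters; note the default-constructed struct already selects
  // SampleCountFlagBits::e1.
  //
  //   vk::PipelineMultisampleStateCreateInfo noMsaa{};   // rasterizationSamples == e1
  //   vk::PipelineMultisampleStateCreateInfo msaa4x =
  //     vk::PipelineMultisampleStateCreateInfo{}
  //       .setRasterizationSamples( vk::SampleCountFlagBits::e4 )
  //       .setSampleShadingEnable( VK_FALSE );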
// wrapper struct for struct VkStencilOpState, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkStencilOpState.html
struct StencilOpState
{
using NativeType = VkStencilOpState;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR StencilOpState(VULKAN_HPP_NAMESPACE::StencilOp failOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep, VULKAN_HPP_NAMESPACE::StencilOp passOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep, VULKAN_HPP_NAMESPACE::StencilOp depthFailOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep, VULKAN_HPP_NAMESPACE::CompareOp compareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever, uint32_t compareMask_ = {}, uint32_t writeMask_ = {}, uint32_t reference_ = {}) VULKAN_HPP_NOEXCEPT
: failOp{ failOp_ }, passOp{ passOp_ }, depthFailOp{ depthFailOp_ }, compareOp{ compareOp_ }, compareMask{ compareMask_ }, writeMask{ writeMask_ }, reference{ reference_ }
{}
VULKAN_HPP_CONSTEXPR StencilOpState( StencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT = default;
StencilOpState( VkStencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT
: StencilOpState( *reinterpret_cast<StencilOpState const *>( &rhs ) )
{}
StencilOpState & operator=( StencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
StencilOpState & operator=( VkStencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::StencilOpState const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 StencilOpState & setFailOp( VULKAN_HPP_NAMESPACE::StencilOp failOp_ ) VULKAN_HPP_NOEXCEPT
{
failOp = failOp_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 StencilOpState & setPassOp( VULKAN_HPP_NAMESPACE::StencilOp passOp_ ) VULKAN_HPP_NOEXCEPT
{
passOp = passOp_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 StencilOpState & setDepthFailOp( VULKAN_HPP_NAMESPACE::StencilOp depthFailOp_ ) VULKAN_HPP_NOEXCEPT
{
depthFailOp = depthFailOp_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 StencilOpState & setCompareOp( VULKAN_HPP_NAMESPACE::CompareOp compareOp_ ) VULKAN_HPP_NOEXCEPT
{
compareOp = compareOp_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 StencilOpState & setCompareMask( uint32_t compareMask_ ) VULKAN_HPP_NOEXCEPT
{
compareMask = compareMask_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 StencilOpState & setWriteMask( uint32_t writeMask_ ) VULKAN_HPP_NOEXCEPT
{
writeMask = writeMask_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 StencilOpState & setReference( uint32_t reference_ ) VULKAN_HPP_NOEXCEPT
{
reference = reference_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkStencilOpState const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkStencilOpState*>( this );
}
operator VkStencilOpState &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkStencilOpState*>( this );
}
operator VkStencilOpState const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkStencilOpState*>( this );
}
operator VkStencilOpState *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkStencilOpState*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StencilOp const &, VULKAN_HPP_NAMESPACE::StencilOp const &, VULKAN_HPP_NAMESPACE::StencilOp const &, VULKAN_HPP_NAMESPACE::CompareOp const &, uint32_t const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( failOp, passOp, depthFailOp, compareOp, compareMask, writeMask, reference );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( StencilOpState const & ) const = default;
#else
bool operator==( StencilOpState const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( failOp == rhs.failOp )
&& ( passOp == rhs.passOp )
&& ( depthFailOp == rhs.depthFailOp )
&& ( compareOp == rhs.compareOp )
&& ( compareMask == rhs.compareMask )
&& ( writeMask == rhs.writeMask )
&& ( reference == rhs.reference );
#endif
}
bool operator!=( StencilOpState const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StencilOp failOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep;
VULKAN_HPP_NAMESPACE::StencilOp passOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep;
VULKAN_HPP_NAMESPACE::StencilOp depthFailOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep;
VULKAN_HPP_NAMESPACE::CompareOp compareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever;
uint32_t compareMask = {};
uint32_t writeMask = {};
uint32_t reference = {};
};
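  // Usage sketch (illustrative): a write-on-pass stencil configuration, assuming the
  // default vk namespace and enabled setters; the mask and reference values are arbitrary
  // application choices.
  //
  //   vk::StencilOpState stencilWrite =
  //     vk::StencilOpState{}
  //       .setFailOp( vk::StencilOp::eKeep )
  //       .setPassOp( vk::StencilOp::eReplace )
  //       .setCompareOp( vk::CompareOp::eAlways )
  //       .setCompareMask( 0xFF )
  //       .setWriteMask( 0xFF )
  //       .setReference( 1 );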
// wrapper struct for struct VkPipelineDepthStencilStateCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineDepthStencilStateCreateInfo.html
struct PipelineDepthStencilStateCreateInfo
{
using NativeType = VkPipelineDepthStencilStateCreateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineDepthStencilStateCreateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable_ = {}, VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever, VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable_ = {}, VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable_ = {}, VULKAN_HPP_NAMESPACE::StencilOpState front_ = {}, VULKAN_HPP_NAMESPACE::StencilOpState back_ = {}, float minDepthBounds_ = {}, float maxDepthBounds_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, depthTestEnable{ depthTestEnable_ }, depthWriteEnable{ depthWriteEnable_ }, depthCompareOp{ depthCompareOp_ }, depthBoundsTestEnable{ depthBoundsTestEnable_ }, stencilTestEnable{ stencilTestEnable_ }, front{ front_ }, back{ back_ }, minDepthBounds{ minDepthBounds_ }, maxDepthBounds{ maxDepthBounds_ }
{}
VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateInfo( PipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineDepthStencilStateCreateInfo( VkPipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineDepthStencilStateCreateInfo( *reinterpret_cast<PipelineDepthStencilStateCreateInfo const *>( &rhs ) )
{}
PipelineDepthStencilStateCreateInfo & operator=( PipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineDepthStencilStateCreateInfo & operator=( VkPipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable_ ) VULKAN_HPP_NOEXCEPT
{
depthTestEnable = depthTestEnable_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable_ ) VULKAN_HPP_NOEXCEPT
{
depthWriteEnable = depthWriteEnable_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp_ ) VULKAN_HPP_NOEXCEPT
{
depthCompareOp = depthCompareOp_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable_ ) VULKAN_HPP_NOEXCEPT
{
depthBoundsTestEnable = depthBoundsTestEnable_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable_ ) VULKAN_HPP_NOEXCEPT
{
stencilTestEnable = stencilTestEnable_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setFront( VULKAN_HPP_NAMESPACE::StencilOpState const & front_ ) VULKAN_HPP_NOEXCEPT
{
front = front_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setBack( VULKAN_HPP_NAMESPACE::StencilOpState const & back_ ) VULKAN_HPP_NOEXCEPT
{
back = back_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setMinDepthBounds( float minDepthBounds_ ) VULKAN_HPP_NOEXCEPT
{
minDepthBounds = minDepthBounds_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setMaxDepthBounds( float maxDepthBounds_ ) VULKAN_HPP_NOEXCEPT
{
maxDepthBounds = maxDepthBounds_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPipelineDepthStencilStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineDepthStencilStateCreateInfo*>( this );
}
operator VkPipelineDepthStencilStateCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineDepthStencilStateCreateInfo*>( this );
}
operator VkPipelineDepthStencilStateCreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineDepthStencilStateCreateInfo*>( this );
}
operator VkPipelineDepthStencilStateCreateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineDepthStencilStateCreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::CompareOp const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::StencilOpState const &, VULKAN_HPP_NAMESPACE::StencilOpState const &, float const &, float const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, depthTestEnable, depthWriteEnable, depthCompareOp, depthBoundsTestEnable, stencilTestEnable, front, back, minDepthBounds, maxDepthBounds );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PipelineDepthStencilStateCreateInfo const & ) const = default;
#else
bool operator==( PipelineDepthStencilStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( depthTestEnable == rhs.depthTestEnable )
&& ( depthWriteEnable == rhs.depthWriteEnable )
&& ( depthCompareOp == rhs.depthCompareOp )
&& ( depthBoundsTestEnable == rhs.depthBoundsTestEnable )
&& ( stencilTestEnable == rhs.stencilTestEnable )
&& ( front == rhs.front )
&& ( back == rhs.back )
&& ( minDepthBounds == rhs.minDepthBounds )
&& ( maxDepthBounds == rhs.maxDepthBounds );
#endif
}
bool operator!=( PipelineDepthStencilStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDepthStencilStateCreateInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags = {};
VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable = {};
VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable = {};
VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever;
VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable = {};
VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable = {};
VULKAN_HPP_NAMESPACE::StencilOpState front = {};
VULKAN_HPP_NAMESPACE::StencilOpState back = {};
float minDepthBounds = {};
float maxDepthBounds = {};
};
template <>
struct CppType<StructureType, StructureType::ePipelineDepthStencilStateCreateInfo>
{
using Type = PipelineDepthStencilStateCreateInfo;
};
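  // Usage sketch (illustrative): standard depth testing without stencil, assuming the
  // default vk namespace and enabled setters. A StencilOpState such as the one sketched
  // above could be supplied via setFront()/setBack() when stencilTestEnable is set.
  //
  //   vk::PipelineDepthStencilStateCreateInfo depthStencilState =
  //     vk::PipelineDepthStencilStateCreateInfo{}
  //       .setDepthTestEnable( VK_TRUE )
  //       .setDepthWriteEnable( VK_TRUE )
  //       .setDepthCompareOp( vk::CompareOp::eLessOrEqual );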
// wrapper struct for struct VkPipelineColorBlendAttachmentState, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineColorBlendAttachmentState.html
struct PipelineColorBlendAttachmentState
{
using NativeType = VkPipelineColorBlendAttachmentState;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineColorBlendAttachmentState(VULKAN_HPP_NAMESPACE::Bool32 blendEnable_ = {}, VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd, VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd, VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask_ = {}) VULKAN_HPP_NOEXCEPT
: blendEnable{ blendEnable_ }, srcColorBlendFactor{ srcColorBlendFactor_ }, dstColorBlendFactor{ dstColorBlendFactor_ }, colorBlendOp{ colorBlendOp_ }, srcAlphaBlendFactor{ srcAlphaBlendFactor_ }, dstAlphaBlendFactor{ dstAlphaBlendFactor_ }, alphaBlendOp{ alphaBlendOp_ }, colorWriteMask{ colorWriteMask_ }
{}
VULKAN_HPP_CONSTEXPR PipelineColorBlendAttachmentState( PipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineColorBlendAttachmentState( VkPipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineColorBlendAttachmentState( *reinterpret_cast<PipelineColorBlendAttachmentState const *>( &rhs ) )
{}
PipelineColorBlendAttachmentState & operator=( PipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineColorBlendAttachmentState & operator=( VkPipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setBlendEnable( VULKAN_HPP_NAMESPACE::Bool32 blendEnable_ ) VULKAN_HPP_NOEXCEPT
{
blendEnable = blendEnable_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setSrcColorBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT
{
srcColorBlendFactor = srcColorBlendFactor_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setDstColorBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT
{
dstColorBlendFactor = dstColorBlendFactor_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setColorBlendOp( VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ ) VULKAN_HPP_NOEXCEPT
{
colorBlendOp = colorBlendOp_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setSrcAlphaBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT
{
srcAlphaBlendFactor = srcAlphaBlendFactor_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setDstAlphaBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT
{
dstAlphaBlendFactor = dstAlphaBlendFactor_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setAlphaBlendOp( VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ ) VULKAN_HPP_NOEXCEPT
{
alphaBlendOp = alphaBlendOp_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setColorWriteMask( VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask_ ) VULKAN_HPP_NOEXCEPT
{
colorWriteMask = colorWriteMask_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPipelineColorBlendAttachmentState const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineColorBlendAttachmentState*>( this );
}
operator VkPipelineColorBlendAttachmentState &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineColorBlendAttachmentState*>( this );
}
operator VkPipelineColorBlendAttachmentState const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineColorBlendAttachmentState*>( this );
}
operator VkPipelineColorBlendAttachmentState *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineColorBlendAttachmentState*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::BlendFactor const &, VULKAN_HPP_NAMESPACE::BlendFactor const &, VULKAN_HPP_NAMESPACE::BlendOp const &, VULKAN_HPP_NAMESPACE::BlendFactor const &, VULKAN_HPP_NAMESPACE::BlendFactor const &, VULKAN_HPP_NAMESPACE::BlendOp const &, VULKAN_HPP_NAMESPACE::ColorComponentFlags const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( blendEnable, srcColorBlendFactor, dstColorBlendFactor, colorBlendOp, srcAlphaBlendFactor, dstAlphaBlendFactor, alphaBlendOp, colorWriteMask );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PipelineColorBlendAttachmentState const & ) const = default;
#else
bool operator==( PipelineColorBlendAttachmentState const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( blendEnable == rhs.blendEnable )
&& ( srcColorBlendFactor == rhs.srcColorBlendFactor )
&& ( dstColorBlendFactor == rhs.dstColorBlendFactor )
&& ( colorBlendOp == rhs.colorBlendOp )
&& ( srcAlphaBlendFactor == rhs.srcAlphaBlendFactor )
&& ( dstAlphaBlendFactor == rhs.dstAlphaBlendFactor )
&& ( alphaBlendOp == rhs.alphaBlendOp )
&& ( colorWriteMask == rhs.colorWriteMask );
#endif
}
bool operator!=( PipelineColorBlendAttachmentState const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::Bool32 blendEnable = {};
VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd;
VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd;
VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask = {};
};
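  // Usage sketch (illustrative): conventional source-over alpha blending with all color
  // channels written, assuming the default vk namespace and enabled setters.
  //
  //   vk::PipelineColorBlendAttachmentState blendAttachment =
  //     vk::PipelineColorBlendAttachmentState{}
  //       .setBlendEnable( VK_TRUE )
  //       .setSrcColorBlendFactor( vk::BlendFactor::eSrcAlpha )
  //       .setDstColorBlendFactor( vk::BlendFactor::eOneMinusSrcAlpha )
  //       .setColorBlendOp( vk::BlendOp::eAdd )
  //       .setSrcAlphaBlendFactor( vk::BlendFactor::eOne )
  //       .setDstAlphaBlendFactor( vk::BlendFactor::eZero )
  //       .setAlphaBlendOp( vk::BlendOp::eAdd )
  //       .setColorWriteMask( vk::ColorComponentFlagBits::eR | vk::ColorComponentFlagBits::eG
  //                           | vk::ColorComponentFlagBits::eB | vk::ColorComponentFlagBits::eA );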
// wrapper struct for struct VkPipelineColorBlendStateCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineColorBlendStateCreateInfo.html
struct PipelineColorBlendStateCreateInfo
{
using NativeType = VkPipelineColorBlendStateCreateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineColorBlendStateCreateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_ = {}, VULKAN_HPP_NAMESPACE::LogicOp logicOp_ = VULKAN_HPP_NAMESPACE::LogicOp::eClear, uint32_t attachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState * pAttachments_ = {}, std::array<float,4> const & blendConstants_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, logicOpEnable{ logicOpEnable_ }, logicOp{ logicOp_ }, attachmentCount{ attachmentCount_ }, pAttachments{ pAttachments_ }, blendConstants{ blendConstants_ }
{}
VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo( PipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineColorBlendStateCreateInfo( VkPipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineColorBlendStateCreateInfo( *reinterpret_cast<PipelineColorBlendStateCreateInfo const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineColorBlendStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_, VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_, VULKAN_HPP_NAMESPACE::LogicOp logicOp_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState> const & attachments_, std::array<float,4> const & blendConstants_ = {}, const void * pNext_ = nullptr )
: pNext( pNext_ ), flags( flags_ ), logicOpEnable( logicOpEnable_ ), logicOp( logicOp_ ), attachmentCount( static_cast<uint32_t>( attachments_.size() ) ), pAttachments( attachments_.data() ), blendConstants( blendConstants_ )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
PipelineColorBlendStateCreateInfo & operator=( PipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineColorBlendStateCreateInfo & operator=( VkPipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setLogicOpEnable( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_ ) VULKAN_HPP_NOEXCEPT
{
logicOpEnable = logicOpEnable_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setLogicOp( VULKAN_HPP_NAMESPACE::LogicOp logicOp_ ) VULKAN_HPP_NOEXCEPT
{
logicOp = logicOp_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
{
attachmentCount = attachmentCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState * pAttachments_ ) VULKAN_HPP_NOEXCEPT
{
pAttachments = pAttachments_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineColorBlendStateCreateInfo & setAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState> const & attachments_ ) VULKAN_HPP_NOEXCEPT
{
attachmentCount = static_cast<uint32_t>( attachments_.size() );
pAttachments = attachments_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setBlendConstants( std::array<float,4> blendConstants_ ) VULKAN_HPP_NOEXCEPT
{
blendConstants = blendConstants_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPipelineColorBlendStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineColorBlendStateCreateInfo*>( this );
}
operator VkPipelineColorBlendStateCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineColorBlendStateCreateInfo*>( this );
}
operator VkPipelineColorBlendStateCreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineColorBlendStateCreateInfo*>( this );
}
operator VkPipelineColorBlendStateCreateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineColorBlendStateCreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::LogicOp const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState * const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, logicOpEnable, logicOp, attachmentCount, pAttachments, blendConstants );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PipelineColorBlendStateCreateInfo const & ) const = default;
#else
bool operator==( PipelineColorBlendStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( logicOpEnable == rhs.logicOpEnable )
&& ( logicOp == rhs.logicOp )
&& ( attachmentCount == rhs.attachmentCount )
&& ( pAttachments == rhs.pAttachments )
&& ( blendConstants == rhs.blendConstants );
#endif
}
bool operator!=( PipelineColorBlendStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineColorBlendStateCreateInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags = {};
VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable = {};
VULKAN_HPP_NAMESPACE::LogicOp logicOp = VULKAN_HPP_NAMESPACE::LogicOp::eClear;
uint32_t attachmentCount = {};
const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState * pAttachments = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> blendConstants = {};
};
template <>
struct CppType<StructureType, StructureType::ePipelineColorBlendStateCreateInfo>
{
using Type = PipelineColorBlendStateCreateInfo;
};
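  // Usage sketch (illustrative): wiring one attachment state into the blend create info.
  // The setAttachments() overload shown requires enhanced mode (VULKAN_HPP_DISABLE_ENHANCED_MODE
  // undefined) and fills attachmentCount and pAttachments together; blendAttachment is the
  // hypothetical state sketched above, and the array must outlive this struct, which stores
  // only a pointer to it.
  //
  //   std::array<vk::PipelineColorBlendAttachmentState, 1> blendAttachments = { blendAttachment };
  //   vk::PipelineColorBlendStateCreateInfo colorBlendState =
  //     vk::PipelineColorBlendStateCreateInfo{}
  //       .setLogicOpEnable( VK_FALSE )
  //       .setAttachments( blendAttachments );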
// wrapper struct for struct VkPipelineDynamicStateCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineDynamicStateCreateInfo.html
struct PipelineDynamicStateCreateInfo
{
using NativeType = VkPipelineDynamicStateCreateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineDynamicStateCreateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineDynamicStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_ = {}, uint32_t dynamicStateCount_ = {}, const VULKAN_HPP_NAMESPACE::DynamicState * pDynamicStates_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, dynamicStateCount{ dynamicStateCount_ }, pDynamicStates{ pDynamicStates_ }
{}
VULKAN_HPP_CONSTEXPR PipelineDynamicStateCreateInfo( PipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineDynamicStateCreateInfo( VkPipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineDynamicStateCreateInfo( *reinterpret_cast<PipelineDynamicStateCreateInfo const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineDynamicStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DynamicState> const & dynamicStates_, const void * pNext_ = nullptr )
: pNext( pNext_ ), flags( flags_ ), dynamicStateCount( static_cast<uint32_t>( dynamicStates_.size() ) ), pDynamicStates( dynamicStates_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
PipelineDynamicStateCreateInfo & operator=( PipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineDynamicStateCreateInfo & operator=( VkPipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PipelineDynamicStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineDynamicStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineDynamicStateCreateInfo & setDynamicStateCount( uint32_t dynamicStateCount_ ) VULKAN_HPP_NOEXCEPT
{
dynamicStateCount = dynamicStateCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineDynamicStateCreateInfo & setPDynamicStates( const VULKAN_HPP_NAMESPACE::DynamicState * pDynamicStates_ ) VULKAN_HPP_NOEXCEPT
{
pDynamicStates = pDynamicStates_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineDynamicStateCreateInfo & setDynamicStates( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DynamicState> const & dynamicStates_ ) VULKAN_HPP_NOEXCEPT
{
dynamicStateCount = static_cast<uint32_t>( dynamicStates_.size() );
pDynamicStates = dynamicStates_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPipelineDynamicStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineDynamicStateCreateInfo*>( this );
}
operator VkPipelineDynamicStateCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineDynamicStateCreateInfo*>( this );
}
operator VkPipelineDynamicStateCreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineDynamicStateCreateInfo*>( this );
}
operator VkPipelineDynamicStateCreateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineDynamicStateCreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::DynamicState * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, dynamicStateCount, pDynamicStates );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PipelineDynamicStateCreateInfo const & ) const = default;
#else
bool operator==( PipelineDynamicStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( dynamicStateCount == rhs.dynamicStateCount )
&& ( pDynamicStates == rhs.pDynamicStates );
#endif
}
bool operator!=( PipelineDynamicStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDynamicStateCreateInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags = {};
uint32_t dynamicStateCount = {};
const VULKAN_HPP_NAMESPACE::DynamicState * pDynamicStates = {};
};
template <>
struct CppType<StructureType, StructureType::ePipelineDynamicStateCreateInfo>
{
using Type = PipelineDynamicStateCreateInfo;
};
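  // Usage sketch (illustrative): marking viewport and scissor as dynamic, assuming the
  // default vk namespace and enhanced mode for the array-proxy setter; as above, the array
  // must outlive the create info.
  //
  //   std::array<vk::DynamicState, 2> dynamicStates = { vk::DynamicState::eViewport,
  //                                                     vk::DynamicState::eScissor };
  //   vk::PipelineDynamicStateCreateInfo dynamicState =
  //     vk::PipelineDynamicStateCreateInfo{}.setDynamicStates( dynamicStates );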
// wrapper struct for struct VkGraphicsPipelineCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkGraphicsPipelineCreateInfo.html
struct GraphicsPipelineCreateInfo
{
using NativeType = VkGraphicsPipelineCreateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGraphicsPipelineCreateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo(VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, uint32_t stageCount_ = {}, const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ = {}, const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * pInputAssemblyState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo * pViewportState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * pRasterizationState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo * pMultisampleState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo * pDepthStencilState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo * pColorBlendState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, uint32_t subpass_ = {}, VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, stageCount{ stageCount_ }, pStages{ pStages_ }, pVertexInputState{ pVertexInputState_ }, pInputAssemblyState{ pInputAssemblyState_ }, pTessellationState{ pTessellationState_ }, pViewportState{ pViewportState_ }, pRasterizationState{ pRasterizationState_ }, pMultisampleState{ pMultisampleState_ }, pDepthStencilState{ pDepthStencilState_ }, pColorBlendState{ pColorBlendState_ }, pDynamicState{ pDynamicState_ }, layout{ layout_ }, renderPass{ renderPass_ }, subpass{ subpass_ }, basePipelineHandle{ basePipelineHandle_ }, basePipelineIndex{ basePipelineIndex_ }
{}
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo( GraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
GraphicsPipelineCreateInfo( VkGraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: GraphicsPipelineCreateInfo( *reinterpret_cast<GraphicsPipelineCreateInfo const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
GraphicsPipelineCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_, const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * pInputAssemblyState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo * pViewportState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * pRasterizationState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo * pMultisampleState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo * pDepthStencilState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo * pColorBlendState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, uint32_t subpass_ = {}, VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {}, const void * pNext_ = nullptr )
: pNext( pNext_ ), flags( flags_ ), stageCount( static_cast<uint32_t>( stages_.size() ) ), pStages( stages_.data() ), pVertexInputState( pVertexInputState_ ), pInputAssemblyState( pInputAssemblyState_ ), pTessellationState( pTessellationState_ ), pViewportState( pViewportState_ ), pRasterizationState( pRasterizationState_ ), pMultisampleState( pMultisampleState_ ), pDepthStencilState( pDepthStencilState_ ), pColorBlendState( pColorBlendState_ ), pDynamicState( pDynamicState_ ), layout( layout_ ), renderPass( renderPass_ ), subpass( subpass_ ), basePipelineHandle( basePipelineHandle_ ), basePipelineIndex( basePipelineIndex_ )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
GraphicsPipelineCreateInfo & operator=( GraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
GraphicsPipelineCreateInfo & operator=( VkGraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT
{
stageCount = stageCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT
{
pStages = pStages_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
GraphicsPipelineCreateInfo & setStages( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_ ) VULKAN_HPP_NOEXCEPT
{
stageCount = static_cast<uint32_t>( stages_.size() );
pStages = stages_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPVertexInputState( const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ ) VULKAN_HPP_NOEXCEPT
{
pVertexInputState = pVertexInputState_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPInputAssemblyState( const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * pInputAssemblyState_ ) VULKAN_HPP_NOEXCEPT
{
pInputAssemblyState = pInputAssemblyState_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPTessellationState( const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ ) VULKAN_HPP_NOEXCEPT
{
pTessellationState = pTessellationState_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPViewportState( const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo * pViewportState_ ) VULKAN_HPP_NOEXCEPT
{
pViewportState = pViewportState_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPRasterizationState( const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * pRasterizationState_ ) VULKAN_HPP_NOEXCEPT
{
pRasterizationState = pRasterizationState_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPMultisampleState( const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo * pMultisampleState_ ) VULKAN_HPP_NOEXCEPT
{
pMultisampleState = pMultisampleState_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPDepthStencilState( const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo * pDepthStencilState_ ) VULKAN_HPP_NOEXCEPT
{
pDepthStencilState = pDepthStencilState_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPColorBlendState( const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo * pColorBlendState_ ) VULKAN_HPP_NOEXCEPT
{
pColorBlendState = pColorBlendState_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPDynamicState( const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ ) VULKAN_HPP_NOEXCEPT
{
pDynamicState = pDynamicState_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
{
layout = layout_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
{
renderPass = renderPass_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setSubpass( uint32_t subpass_ ) VULKAN_HPP_NOEXCEPT
{
subpass = subpass_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
{
basePipelineHandle = basePipelineHandle_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT
{
basePipelineIndex = basePipelineIndex_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkGraphicsPipelineCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( this );
}
operator VkGraphicsPipelineCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkGraphicsPipelineCreateInfo*>( this );
}
operator VkGraphicsPipelineCreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( this );
}
operator VkGraphicsPipelineCreateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkGraphicsPipelineCreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineCreateFlags const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * const &, const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * const &, const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * const &, const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * const &, const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo * const &, const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * const &, const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo * const &, const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo * const &, const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo * const &, const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * const &, VULKAN_HPP_NAMESPACE::PipelineLayout const &, VULKAN_HPP_NAMESPACE::RenderPass const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Pipeline const &, int32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, stageCount, pStages, pVertexInputState, pInputAssemblyState, pTessellationState, pViewportState, pRasterizationState, pMultisampleState, pDepthStencilState, pColorBlendState, pDynamicState, layout, renderPass, subpass, basePipelineHandle, basePipelineIndex );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( GraphicsPipelineCreateInfo const & ) const = default;
#else
bool operator==( GraphicsPipelineCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( stageCount == rhs.stageCount )
&& ( pStages == rhs.pStages )
&& ( pVertexInputState == rhs.pVertexInputState )
&& ( pInputAssemblyState == rhs.pInputAssemblyState )
&& ( pTessellationState == rhs.pTessellationState )
&& ( pViewportState == rhs.pViewportState )
&& ( pRasterizationState == rhs.pRasterizationState )
&& ( pMultisampleState == rhs.pMultisampleState )
&& ( pDepthStencilState == rhs.pDepthStencilState )
&& ( pColorBlendState == rhs.pColorBlendState )
&& ( pDynamicState == rhs.pDynamicState )
&& ( layout == rhs.layout )
&& ( renderPass == rhs.renderPass )
&& ( subpass == rhs.subpass )
&& ( basePipelineHandle == rhs.basePipelineHandle )
&& ( basePipelineIndex == rhs.basePipelineIndex );
#endif
}
bool operator!=( GraphicsPipelineCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsPipelineCreateInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {};
uint32_t stageCount = {};
const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages = {};
const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState = {};
const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * pInputAssemblyState = {};
const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState = {};
const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo * pViewportState = {};
const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * pRasterizationState = {};
const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo * pMultisampleState = {};
const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo * pDepthStencilState = {};
const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo * pColorBlendState = {};
const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState = {};
VULKAN_HPP_NAMESPACE::PipelineLayout layout = {};
VULKAN_HPP_NAMESPACE::RenderPass renderPass = {};
uint32_t subpass = {};
VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {};
int32_t basePipelineIndex = {};
};
template <>
struct CppType<StructureType, StructureType::eGraphicsPipelineCreateInfo>
{
using Type = GraphicsPipelineCreateInfo;
};
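  // Usage sketch (illustrative): assembling the per-stage state sketched above into one
  // create info and building the pipeline. shaderStages, vertexInputState, inputAssemblyState,
  // viewportState, pipelineLayout, renderPass and device are hypothetical, previously created
  // objects; assumes the default vk namespace, enhanced mode, and smart handles
  // (VULKAN_HPP_NO_SMART_HANDLE undefined).
  //
  //   vk::GraphicsPipelineCreateInfo pipelineInfo =
  //     vk::GraphicsPipelineCreateInfo{}
  //       .setStages( shaderStages )                    // sets stageCount and pStages together
  //       .setPVertexInputState( &vertexInputState )
  //       .setPInputAssemblyState( &inputAssemblyState )
  //       .setPViewportState( &viewportState )
  //       .setPRasterizationState( &rasterizationState )
  //       .setPMultisampleState( &multisampleState )
  //       .setPDepthStencilState( &depthStencilState )
  //       .setPColorBlendState( &colorBlendState )
  //       .setPDynamicState( &dynamicState )
  //       .setLayout( pipelineLayout )
  //       .setRenderPass( renderPass )
  //       .setSubpass( 0 );
  //   vk::UniquePipeline pipeline =
  //     device.createGraphicsPipelineUnique( nullptr, pipelineInfo ).value;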
// wrapper struct for struct VkGraphicsPipelineLibraryCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkGraphicsPipelineLibraryCreateInfoEXT.html
struct GraphicsPipelineLibraryCreateInfoEXT
{
using NativeType = VkGraphicsPipelineLibraryCreateInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGraphicsPipelineLibraryCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR GraphicsPipelineLibraryCreateInfoEXT(VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryFlagsEXT flags_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }
{}
VULKAN_HPP_CONSTEXPR GraphicsPipelineLibraryCreateInfoEXT( GraphicsPipelineLibraryCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
GraphicsPipelineLibraryCreateInfoEXT( VkGraphicsPipelineLibraryCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: GraphicsPipelineLibraryCreateInfoEXT( *reinterpret_cast<GraphicsPipelineLibraryCreateInfoEXT const *>( &rhs ) )
{}
GraphicsPipelineLibraryCreateInfoEXT & operator=( GraphicsPipelineLibraryCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
GraphicsPipelineLibraryCreateInfoEXT & operator=( VkGraphicsPipelineLibraryCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryCreateInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineLibraryCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineLibraryCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkGraphicsPipelineLibraryCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkGraphicsPipelineLibraryCreateInfoEXT*>( this );
}
operator VkGraphicsPipelineLibraryCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkGraphicsPipelineLibraryCreateInfoEXT*>( this );
}
operator VkGraphicsPipelineLibraryCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkGraphicsPipelineLibraryCreateInfoEXT*>( this );
}
operator VkGraphicsPipelineLibraryCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkGraphicsPipelineLibraryCreateInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryFlagsEXT const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( GraphicsPipelineLibraryCreateInfoEXT const & ) const = default;
#else
bool operator==( GraphicsPipelineLibraryCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags );
#endif
}
bool operator!=( GraphicsPipelineLibraryCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsPipelineLibraryCreateInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryFlagsEXT flags = {};
};
template <>
struct CppType<StructureType, StructureType::eGraphicsPipelineLibraryCreateInfoEXT>
{
using Type = GraphicsPipelineLibraryCreateInfoEXT;
};
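  // Usage sketch (illustrative): requesting only the vertex-input-interface subset when
  // building a pipeline library with VK_EXT_graphics_pipeline_library. The struct is
  // chained through pNext of a GraphicsPipelineCreateInfo whose flags would also include
  // PipelineCreateFlagBits::eLibraryKHR; assumes the default vk namespace and that the
  // extension is enabled on the device.
  //
  //   vk::GraphicsPipelineLibraryCreateInfoEXT libraryInfo =
  //     vk::GraphicsPipelineLibraryCreateInfoEXT{}
  //       .setFlags( vk::GraphicsPipelineLibraryFlagBitsEXT::eVertexInputInterface );
  //   vk::GraphicsPipelineCreateInfo libraryPipelineInfo =
  //     vk::GraphicsPipelineCreateInfo{}
  //       .setFlags( vk::PipelineCreateFlagBits::eLibraryKHR )
  //       .setPNext( &libraryInfo );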
// wrapper struct for struct VkGraphicsShaderGroupCreateInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkGraphicsShaderGroupCreateInfoNV.html
struct GraphicsShaderGroupCreateInfoNV
{
using NativeType = VkGraphicsShaderGroupCreateInfoNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGraphicsShaderGroupCreateInfoNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR GraphicsShaderGroupCreateInfoNV(uint32_t stageCount_ = {}, const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ = {}, const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, stageCount{ stageCount_ }, pStages{ pStages_ }, pVertexInputState{ pVertexInputState_ }, pTessellationState{ pTessellationState_ }
{}
VULKAN_HPP_CONSTEXPR GraphicsShaderGroupCreateInfoNV( GraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
GraphicsShaderGroupCreateInfoNV( VkGraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: GraphicsShaderGroupCreateInfoNV( *reinterpret_cast<GraphicsShaderGroupCreateInfoNV const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
GraphicsShaderGroupCreateInfoNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_, const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ = {}, const void * pNext_ = nullptr )
: pNext( pNext_ ), stageCount( static_cast<uint32_t>( stages_.size() ) ), pStages( stages_.data() ), pVertexInputState( pVertexInputState_ ), pTessellationState( pTessellationState_ )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
GraphicsShaderGroupCreateInfoNV & operator=( GraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
GraphicsShaderGroupCreateInfoNV & operator=( VkGraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT
{
stageCount = stageCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV & setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT
{
pStages = pStages_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
GraphicsShaderGroupCreateInfoNV & setStages( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_ ) VULKAN_HPP_NOEXCEPT
{
stageCount = static_cast<uint32_t>( stages_.size() );
pStages = stages_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV & setPVertexInputState( const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ ) VULKAN_HPP_NOEXCEPT
{
pVertexInputState = pVertexInputState_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV & setPTessellationState( const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ ) VULKAN_HPP_NOEXCEPT
{
pTessellationState = pTessellationState_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkGraphicsShaderGroupCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkGraphicsShaderGroupCreateInfoNV*>( this );
}
operator VkGraphicsShaderGroupCreateInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkGraphicsShaderGroupCreateInfoNV*>( this );
}
operator VkGraphicsShaderGroupCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkGraphicsShaderGroupCreateInfoNV*>( this );
}
operator VkGraphicsShaderGroupCreateInfoNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkGraphicsShaderGroupCreateInfoNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * const &, const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * const &, const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, stageCount, pStages, pVertexInputState, pTessellationState );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( GraphicsShaderGroupCreateInfoNV const & ) const = default;
#else
bool operator==( GraphicsShaderGroupCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( stageCount == rhs.stageCount )
&& ( pStages == rhs.pStages )
&& ( pVertexInputState == rhs.pVertexInputState )
&& ( pTessellationState == rhs.pTessellationState );
#endif
}
bool operator!=( GraphicsShaderGroupCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsShaderGroupCreateInfoNV;
const void * pNext = {};
uint32_t stageCount = {};
const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages = {};
const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState = {};
const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState = {};
};
template <>
struct CppType<StructureType, StructureType::eGraphicsShaderGroupCreateInfoNV>
{
using Type = GraphicsShaderGroupCreateInfoNV;
};
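// Editorial usage sketch (not generated code): with VK_NV_device_generated_commands, each
// shader group names the stages (plus optional vertex-input / tessellation state) of one
// bindable group within a graphics pipeline. The ArrayProxy constructor above fills
// stageCount/pStages from any contiguous range; `vertexStage`, `fragmentStage` and
// `vertexInput` are hypothetical locals:
//
//   std::array<vk::PipelineShaderStageCreateInfo, 2> stages = { vertexStage, fragmentStage };
//   vk::PipelineVertexInputStateCreateInfo vertexInput{};
//   vk::GraphicsShaderGroupCreateInfoNV group( stages, &vertexInput );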
// wrapper struct for struct VkGraphicsPipelineShaderGroupsCreateInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkGraphicsPipelineShaderGroupsCreateInfoNV.html
struct GraphicsPipelineShaderGroupsCreateInfoNV
{
using NativeType = VkGraphicsPipelineShaderGroupsCreateInfoNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR GraphicsPipelineShaderGroupsCreateInfoNV(uint32_t groupCount_ = {}, const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV * pGroups_ = {}, uint32_t pipelineCount_ = {}, const VULKAN_HPP_NAMESPACE::Pipeline * pPipelines_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, groupCount{ groupCount_ }, pGroups{ pGroups_ }, pipelineCount{ pipelineCount_ }, pPipelines{ pPipelines_ }
{}
VULKAN_HPP_CONSTEXPR GraphicsPipelineShaderGroupsCreateInfoNV( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
GraphicsPipelineShaderGroupsCreateInfoNV( VkGraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: GraphicsPipelineShaderGroupsCreateInfoNV( *reinterpret_cast<GraphicsPipelineShaderGroupsCreateInfoNV const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
GraphicsPipelineShaderGroupsCreateInfoNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV> const & groups_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Pipeline> const & pipelines_ = {}, const void * pNext_ = nullptr )
: pNext( pNext_ ), groupCount( static_cast<uint32_t>( groups_.size() ) ), pGroups( groups_.data() ), pipelineCount( static_cast<uint32_t>( pipelines_.size() ) ), pPipelines( pipelines_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
GraphicsPipelineShaderGroupsCreateInfoNV & operator=( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
GraphicsPipelineShaderGroupsCreateInfoNV & operator=( VkGraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GraphicsPipelineShaderGroupsCreateInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV & setGroupCount( uint32_t groupCount_ ) VULKAN_HPP_NOEXCEPT
{
groupCount = groupCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV & setPGroups( const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV * pGroups_ ) VULKAN_HPP_NOEXCEPT
{
pGroups = pGroups_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
GraphicsPipelineShaderGroupsCreateInfoNV & setGroups( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV> const & groups_ ) VULKAN_HPP_NOEXCEPT
{
groupCount = static_cast<uint32_t>( groups_.size() );
pGroups = groups_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV & setPipelineCount( uint32_t pipelineCount_ ) VULKAN_HPP_NOEXCEPT
{
pipelineCount = pipelineCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV & setPPipelines( const VULKAN_HPP_NAMESPACE::Pipeline * pPipelines_ ) VULKAN_HPP_NOEXCEPT
{
pPipelines = pPipelines_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
GraphicsPipelineShaderGroupsCreateInfoNV & setPipelines( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Pipeline> const & pipelines_ ) VULKAN_HPP_NOEXCEPT
{
pipelineCount = static_cast<uint32_t>( pipelines_.size() );
pPipelines = pipelines_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkGraphicsPipelineShaderGroupsCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkGraphicsPipelineShaderGroupsCreateInfoNV*>( this );
}
operator VkGraphicsPipelineShaderGroupsCreateInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkGraphicsPipelineShaderGroupsCreateInfoNV*>( this );
}
operator VkGraphicsPipelineShaderGroupsCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkGraphicsPipelineShaderGroupsCreateInfoNV*>( this );
}
operator VkGraphicsPipelineShaderGroupsCreateInfoNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkGraphicsPipelineShaderGroupsCreateInfoNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Pipeline * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, groupCount, pGroups, pipelineCount, pPipelines );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( GraphicsPipelineShaderGroupsCreateInfoNV const & ) const = default;
#else
bool operator==( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( groupCount == rhs.groupCount )
&& ( pGroups == rhs.pGroups )
&& ( pipelineCount == rhs.pipelineCount )
&& ( pPipelines == rhs.pPipelines );
#endif
}
bool operator!=( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV;
const void * pNext = {};
uint32_t groupCount = {};
const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV * pGroups = {};
uint32_t pipelineCount = {};
const VULKAN_HPP_NAMESPACE::Pipeline * pPipelines = {};
};
template <>
struct CppType<StructureType, StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV>
{
using Type = GraphicsPipelineShaderGroupsCreateInfoNV;
};
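// Editorial usage sketch (not generated code): the shader groups defined above are gathered
// here and chained into a vk::GraphicsPipelineCreateInfo so that device-generated commands
// can later bind them by index; `groups` and `referencedPipelines` are hypothetical
// std::vectors of GraphicsShaderGroupCreateInfoNV and Pipeline:
//
//   vk::GraphicsPipelineShaderGroupsCreateInfoNV shaderGroups( groups, referencedPipelines );
//   vk::GraphicsPipelineCreateInfo pipelineCreateInfo{};
//   pipelineCreateInfo.setPNext( &shaderGroups );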
// wrapper struct for struct VkXYColorEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkXYColorEXT.html
struct XYColorEXT
{
using NativeType = VkXYColorEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR XYColorEXT(float x_ = {}, float y_ = {}) VULKAN_HPP_NOEXCEPT
: x{ x_ }, y{ y_ }
{}
VULKAN_HPP_CONSTEXPR XYColorEXT( XYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
XYColorEXT( VkXYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: XYColorEXT( *reinterpret_cast<XYColorEXT const *>( &rhs ) )
{}
XYColorEXT & operator=( XYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
XYColorEXT & operator=( VkXYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::XYColorEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 XYColorEXT & setX( float x_ ) VULKAN_HPP_NOEXCEPT
{
x = x_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 XYColorEXT & setY( float y_ ) VULKAN_HPP_NOEXCEPT
{
y = y_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkXYColorEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkXYColorEXT*>( this );
}
operator VkXYColorEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkXYColorEXT*>( this );
}
operator VkXYColorEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkXYColorEXT*>( this );
}
operator VkXYColorEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkXYColorEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<float const &, float const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( x, y );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( XYColorEXT const & ) const = default;
#else
bool operator==( XYColorEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( x == rhs.x )
&& ( y == rhs.y );
#endif
}
bool operator!=( XYColorEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
float x = {};
float y = {};
};
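// Editorial usage sketch (not generated code): XYColorEXT holds a CIE 1931 xy chromaticity
// coordinate, as consumed by HdrMetadataEXT below; for example, the BT.709 red primary:
//
//   vk::XYColorEXT red709( 0.640f, 0.330f );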
// wrapper struct for struct VkHdrMetadataEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkHdrMetadataEXT.html
struct HdrMetadataEXT
{
using NativeType = VkHdrMetadataEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHdrMetadataEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR HdrMetadataEXT(VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryRed_ = {}, VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryGreen_ = {}, VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryBlue_ = {}, VULKAN_HPP_NAMESPACE::XYColorEXT whitePoint_ = {}, float maxLuminance_ = {}, float minLuminance_ = {}, float maxContentLightLevel_ = {}, float maxFrameAverageLightLevel_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, displayPrimaryRed{ displayPrimaryRed_ }, displayPrimaryGreen{ displayPrimaryGreen_ }, displayPrimaryBlue{ displayPrimaryBlue_ }, whitePoint{ whitePoint_ }, maxLuminance{ maxLuminance_ }, minLuminance{ minLuminance_ }, maxContentLightLevel{ maxContentLightLevel_ }, maxFrameAverageLightLevel{ maxFrameAverageLightLevel_ }
{}
VULKAN_HPP_CONSTEXPR HdrMetadataEXT( HdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
HdrMetadataEXT( VkHdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: HdrMetadataEXT( *reinterpret_cast<HdrMetadataEXT const *>( &rhs ) )
{}
HdrMetadataEXT & operator=( HdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
HdrMetadataEXT & operator=( VkHdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::HdrMetadataEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setDisplayPrimaryRed( VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryRed_ ) VULKAN_HPP_NOEXCEPT
{
displayPrimaryRed = displayPrimaryRed_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setDisplayPrimaryGreen( VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryGreen_ ) VULKAN_HPP_NOEXCEPT
{
displayPrimaryGreen = displayPrimaryGreen_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setDisplayPrimaryBlue( VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryBlue_ ) VULKAN_HPP_NOEXCEPT
{
displayPrimaryBlue = displayPrimaryBlue_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setWhitePoint( VULKAN_HPP_NAMESPACE::XYColorEXT const & whitePoint_ ) VULKAN_HPP_NOEXCEPT
{
whitePoint = whitePoint_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setMaxLuminance( float maxLuminance_ ) VULKAN_HPP_NOEXCEPT
{
maxLuminance = maxLuminance_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setMinLuminance( float minLuminance_ ) VULKAN_HPP_NOEXCEPT
{
minLuminance = minLuminance_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setMaxContentLightLevel( float maxContentLightLevel_ ) VULKAN_HPP_NOEXCEPT
{
maxContentLightLevel = maxContentLightLevel_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setMaxFrameAverageLightLevel( float maxFrameAverageLightLevel_ ) VULKAN_HPP_NOEXCEPT
{
maxFrameAverageLightLevel = maxFrameAverageLightLevel_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkHdrMetadataEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkHdrMetadataEXT*>( this );
}
operator VkHdrMetadataEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkHdrMetadataEXT*>( this );
}
operator VkHdrMetadataEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkHdrMetadataEXT*>( this );
}
operator VkHdrMetadataEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkHdrMetadataEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::XYColorEXT const &, VULKAN_HPP_NAMESPACE::XYColorEXT const &, VULKAN_HPP_NAMESPACE::XYColorEXT const &, VULKAN_HPP_NAMESPACE::XYColorEXT const &, float const &, float const &, float const &, float const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, displayPrimaryRed, displayPrimaryGreen, displayPrimaryBlue, whitePoint, maxLuminance, minLuminance, maxContentLightLevel, maxFrameAverageLightLevel );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( HdrMetadataEXT const & ) const = default;
#else
bool operator==( HdrMetadataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( displayPrimaryRed == rhs.displayPrimaryRed )
&& ( displayPrimaryGreen == rhs.displayPrimaryGreen )
&& ( displayPrimaryBlue == rhs.displayPrimaryBlue )
&& ( whitePoint == rhs.whitePoint )
&& ( maxLuminance == rhs.maxLuminance )
&& ( minLuminance == rhs.minLuminance )
&& ( maxContentLightLevel == rhs.maxContentLightLevel )
&& ( maxFrameAverageLightLevel == rhs.maxFrameAverageLightLevel );
#endif
}
bool operator!=( HdrMetadataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHdrMetadataEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryRed = {};
VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryGreen = {};
VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryBlue = {};
VULKAN_HPP_NAMESPACE::XYColorEXT whitePoint = {};
float maxLuminance = {};
float minLuminance = {};
float maxContentLightLevel = {};
float maxFrameAverageLightLevel = {};
};
template <>
struct CppType<StructureType, StructureType::eHdrMetadataEXT>
{
using Type = HdrMetadataEXT;
};
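// Editorial usage sketch (not generated code): with VK_EXT_hdr_metadata enabled,
// mastering-display metadata is attached to one or more swapchains through
// vk::Device::setHdrMetadataEXT; `device` and `swapchain` are hypothetical handles, and the
// numbers are typical BT.2020 red primary / D65 white point with SMPTE ST 2086 luminances
// (the green/blue primaries are elided here):
//
//   vk::HdrMetadataEXT metadata{};
//   metadata.setDisplayPrimaryRed( { 0.708f, 0.292f } )
//           .setWhitePoint( { 0.3127f, 0.3290f } )
//           .setMaxLuminance( 1000.0f )
//           .setMinLuminance( 0.001f );
//   device.setHdrMetadataEXT( swapchain, metadata );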
// wrapper struct for struct VkHdrVividDynamicMetadataHUAWEI, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkHdrVividDynamicMetadataHUAWEI.html
struct HdrVividDynamicMetadataHUAWEI
{
using NativeType = VkHdrVividDynamicMetadataHUAWEI;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHdrVividDynamicMetadataHUAWEI;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR HdrVividDynamicMetadataHUAWEI(size_t dynamicMetadataSize_ = {}, const void * pDynamicMetadata_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, dynamicMetadataSize{ dynamicMetadataSize_ }, pDynamicMetadata{ pDynamicMetadata_ }
{}
VULKAN_HPP_CONSTEXPR HdrVividDynamicMetadataHUAWEI( HdrVividDynamicMetadataHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
HdrVividDynamicMetadataHUAWEI( VkHdrVividDynamicMetadataHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
: HdrVividDynamicMetadataHUAWEI( *reinterpret_cast<HdrVividDynamicMetadataHUAWEI const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
template <typename T>
HdrVividDynamicMetadataHUAWEI( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & dynamicMetadata_, const void * pNext_ = nullptr )
: pNext( pNext_ ), dynamicMetadataSize( dynamicMetadata_.size() * sizeof(T) ), pDynamicMetadata( dynamicMetadata_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
HdrVividDynamicMetadataHUAWEI & operator=( HdrVividDynamicMetadataHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
HdrVividDynamicMetadataHUAWEI & operator=( VkHdrVividDynamicMetadataHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::HdrVividDynamicMetadataHUAWEI const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 HdrVividDynamicMetadataHUAWEI & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 HdrVividDynamicMetadataHUAWEI & setDynamicMetadataSize( size_t dynamicMetadataSize_ ) VULKAN_HPP_NOEXCEPT
{
dynamicMetadataSize = dynamicMetadataSize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 HdrVividDynamicMetadataHUAWEI & setPDynamicMetadata( const void * pDynamicMetadata_ ) VULKAN_HPP_NOEXCEPT
{
pDynamicMetadata = pDynamicMetadata_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
template <typename T>
HdrVividDynamicMetadataHUAWEI & setDynamicMetadata( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & dynamicMetadata_ ) VULKAN_HPP_NOEXCEPT
{
dynamicMetadataSize = dynamicMetadata_.size() * sizeof(T);
pDynamicMetadata = dynamicMetadata_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkHdrVividDynamicMetadataHUAWEI const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkHdrVividDynamicMetadataHUAWEI*>( this );
}
operator VkHdrVividDynamicMetadataHUAWEI &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkHdrVividDynamicMetadataHUAWEI*>( this );
}
operator VkHdrVividDynamicMetadataHUAWEI const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkHdrVividDynamicMetadataHUAWEI*>( this );
}
operator VkHdrVividDynamicMetadataHUAWEI *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkHdrVividDynamicMetadataHUAWEI*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, size_t const &, const void * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, dynamicMetadataSize, pDynamicMetadata );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( HdrVividDynamicMetadataHUAWEI const & ) const = default;
#else
bool operator==( HdrVividDynamicMetadataHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( dynamicMetadataSize == rhs.dynamicMetadataSize )
&& ( pDynamicMetadata == rhs.pDynamicMetadata );
#endif
}
bool operator!=( HdrVividDynamicMetadataHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHdrVividDynamicMetadataHUAWEI;
const void * pNext = {};
size_t dynamicMetadataSize = {};
const void * pDynamicMetadata = {};
};
template <>
struct CppType<StructureType, StructureType::eHdrVividDynamicMetadataHUAWEI>
{
using Type = HdrVividDynamicMetadataHUAWEI;
};
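// Editorial usage sketch (not generated code): per VK_HUAWEI_hdr_vivid, this struct carries
// an opaque HDR Vivid dynamic-metadata blob and extends vk::HdrMetadataEXT through its
// pNext chain. The templated setter above converts an element count into a byte size;
// `blob` is a hypothetical std::vector<uint8_t>:
//
//   vk::HdrVividDynamicMetadataHUAWEI vivid{};
//   vivid.setDynamicMetadata<uint8_t>( blob );   // fills dynamicMetadataSize in bytes
//   vk::HdrMetadataEXT hdrMetadata{};
//   hdrMetadata.setPNext( &vivid );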
// wrapper struct for struct VkHeadlessSurfaceCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkHeadlessSurfaceCreateInfoEXT.html
struct HeadlessSurfaceCreateInfoEXT
{
using NativeType = VkHeadlessSurfaceCreateInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHeadlessSurfaceCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR HeadlessSurfaceCreateInfoEXT(VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }
{}
VULKAN_HPP_CONSTEXPR HeadlessSurfaceCreateInfoEXT( HeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
HeadlessSurfaceCreateInfoEXT( VkHeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: HeadlessSurfaceCreateInfoEXT( *reinterpret_cast<HeadlessSurfaceCreateInfoEXT const *>( &rhs ) )
{}
HeadlessSurfaceCreateInfoEXT & operator=( HeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
HeadlessSurfaceCreateInfoEXT & operator=( VkHeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 HeadlessSurfaceCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 HeadlessSurfaceCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkHeadlessSurfaceCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT*>( this );
}
operator VkHeadlessSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkHeadlessSurfaceCreateInfoEXT*>( this );
}
operator VkHeadlessSurfaceCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT*>( this );
}
operator VkHeadlessSurfaceCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkHeadlessSurfaceCreateInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( HeadlessSurfaceCreateInfoEXT const & ) const = default;
#else
bool operator==( HeadlessSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags );
#endif
}
bool operator!=( HeadlessSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHeadlessSurfaceCreateInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags = {};
};
template <>
struct CppType<StructureType, StructureType::eHeadlessSurfaceCreateInfoEXT>
{
using Type = HeadlessSurfaceCreateInfoEXT;
};
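// Editorial usage sketch (not generated code): with VK_EXT_headless_surface, a surface that
// is not backed by any window system can be created, e.g. for automated testing; flags are
// reserved, so default construction suffices. `instance` is a hypothetical vk::Instance:
//
//   vk::HeadlessSurfaceCreateInfoEXT createInfo{};
//   vk::SurfaceKHR surface = instance.createHeadlessSurfaceEXT( createInfo );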
// wrapper struct for struct VkHostImageCopyDevicePerformanceQuery, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkHostImageCopyDevicePerformanceQuery.html
struct HostImageCopyDevicePerformanceQuery
{
using NativeType = VkHostImageCopyDevicePerformanceQuery;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHostImageCopyDevicePerformanceQuery;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR HostImageCopyDevicePerformanceQuery(VULKAN_HPP_NAMESPACE::Bool32 optimalDeviceAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 identicalMemoryLayout_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, optimalDeviceAccess{ optimalDeviceAccess_ }, identicalMemoryLayout{ identicalMemoryLayout_ }
{}
VULKAN_HPP_CONSTEXPR HostImageCopyDevicePerformanceQuery( HostImageCopyDevicePerformanceQuery const & rhs ) VULKAN_HPP_NOEXCEPT = default;
HostImageCopyDevicePerformanceQuery( VkHostImageCopyDevicePerformanceQuery const & rhs ) VULKAN_HPP_NOEXCEPT
: HostImageCopyDevicePerformanceQuery( *reinterpret_cast<HostImageCopyDevicePerformanceQuery const *>( &rhs ) )
{}
HostImageCopyDevicePerformanceQuery & operator=( HostImageCopyDevicePerformanceQuery const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
HostImageCopyDevicePerformanceQuery & operator=( VkHostImageCopyDevicePerformanceQuery const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::HostImageCopyDevicePerformanceQuery const *>( &rhs );
return *this;
}
operator VkHostImageCopyDevicePerformanceQuery const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkHostImageCopyDevicePerformanceQuery*>( this );
}
operator VkHostImageCopyDevicePerformanceQuery &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkHostImageCopyDevicePerformanceQuery*>( this );
}
operator VkHostImageCopyDevicePerformanceQuery const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkHostImageCopyDevicePerformanceQuery*>( this );
}
operator VkHostImageCopyDevicePerformanceQuery *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkHostImageCopyDevicePerformanceQuery*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, optimalDeviceAccess, identicalMemoryLayout );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( HostImageCopyDevicePerformanceQuery const & ) const = default;
#else
bool operator==( HostImageCopyDevicePerformanceQuery const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( optimalDeviceAccess == rhs.optimalDeviceAccess )
&& ( identicalMemoryLayout == rhs.identicalMemoryLayout );
#endif
}
bool operator!=( HostImageCopyDevicePerformanceQuery const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHostImageCopyDevicePerformanceQuery;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 optimalDeviceAccess = {};
VULKAN_HPP_NAMESPACE::Bool32 identicalMemoryLayout = {};
};
template <>
struct CppType<StructureType, StructureType::eHostImageCopyDevicePerformanceQuery>
{
using Type = HostImageCopyDevicePerformanceQuery;
};
using HostImageCopyDevicePerformanceQueryEXT = HostImageCopyDevicePerformanceQuery;
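// Editorial usage sketch (not generated code): this is an output-only struct (non-const
// pNext, no setters; the alias above keeps the pre-promotion EXT spelling usable). It is
// chained into an image-format-properties query to learn whether host-copyable images keep
// optimal device access; `physicalDevice` and `formatInfo` are hypothetical:
//
//   auto chain = physicalDevice.getImageFormatProperties2<
//     vk::ImageFormatProperties2, vk::HostImageCopyDevicePerformanceQuery>( formatInfo );
//   bool optimal = !!chain.get<vk::HostImageCopyDevicePerformanceQuery>().optimalDeviceAccess;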
// wrapper struct for struct VkHostImageLayoutTransitionInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkHostImageLayoutTransitionInfo.html
struct HostImageLayoutTransitionInfo
{
using NativeType = VkHostImageLayoutTransitionInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHostImageLayoutTransitionInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR HostImageLayoutTransitionInfo(VULKAN_HPP_NAMESPACE::Image image_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, image{ image_ }, oldLayout{ oldLayout_ }, newLayout{ newLayout_ }, subresourceRange{ subresourceRange_ }
{}
VULKAN_HPP_CONSTEXPR HostImageLayoutTransitionInfo( HostImageLayoutTransitionInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
HostImageLayoutTransitionInfo( VkHostImageLayoutTransitionInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: HostImageLayoutTransitionInfo( *reinterpret_cast<HostImageLayoutTransitionInfo const *>( &rhs ) )
{}
HostImageLayoutTransitionInfo & operator=( HostImageLayoutTransitionInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
HostImageLayoutTransitionInfo & operator=( VkHostImageLayoutTransitionInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 HostImageLayoutTransitionInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 HostImageLayoutTransitionInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
{
image = image_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 HostImageLayoutTransitionInfo & setOldLayout( VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ ) VULKAN_HPP_NOEXCEPT
{
oldLayout = oldLayout_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 HostImageLayoutTransitionInfo & setNewLayout( VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ ) VULKAN_HPP_NOEXCEPT
{
newLayout = newLayout_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 HostImageLayoutTransitionInfo & setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT
{
subresourceRange = subresourceRange_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkHostImageLayoutTransitionInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkHostImageLayoutTransitionInfo*>( this );
}
operator VkHostImageLayoutTransitionInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkHostImageLayoutTransitionInfo*>( this );
}
operator VkHostImageLayoutTransitionInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkHostImageLayoutTransitionInfo*>( this );
}
operator VkHostImageLayoutTransitionInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkHostImageLayoutTransitionInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Image const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, VULKAN_HPP_NAMESPACE::ImageSubresourceRange const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, image, oldLayout, newLayout, subresourceRange );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( HostImageLayoutTransitionInfo const & ) const = default;
#else
bool operator==( HostImageLayoutTransitionInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( image == rhs.image )
&& ( oldLayout == rhs.oldLayout )
&& ( newLayout == rhs.newLayout )
&& ( subresourceRange == rhs.subresourceRange );
#endif
}
bool operator!=( HostImageLayoutTransitionInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHostImageLayoutTransitionInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Image image = {};
VULKAN_HPP_NAMESPACE::ImageLayout oldLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
VULKAN_HPP_NAMESPACE::ImageLayout newLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {};
};
template <>
struct CppType<StructureType, StructureType::eHostImageLayoutTransitionInfo>
{
using Type = HostImageLayoutTransitionInfo;
};
using HostImageLayoutTransitionInfoEXT = HostImageLayoutTransitionInfo;
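// Editorial usage sketch (not generated code): with host image copy (VK_EXT_host_image_copy,
// core in Vulkan 1.4; the alias above keeps the EXT spelling), a layout transition can be
// performed from the host without recording a command buffer, assuming the image was created
// with host-transfer usage; `device` and `image` are hypothetical handles:
//
//   vk::HostImageLayoutTransitionInfo transition{};
//   transition.setImage( image )
//             .setOldLayout( vk::ImageLayout::eUndefined )
//             .setNewLayout( vk::ImageLayout::eGeneral )
//             .setSubresourceRange( { vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 } );
//   device.transitionImageLayout( transition );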
#if defined( VK_USE_PLATFORM_IOS_MVK )
// wrapper struct for struct VkIOSSurfaceCreateInfoMVK, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkIOSSurfaceCreateInfoMVK.html
struct IOSSurfaceCreateInfoMVK
{
using NativeType = VkIOSSurfaceCreateInfoMVK;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIosSurfaceCreateInfoMVK;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR IOSSurfaceCreateInfoMVK(VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags_ = {}, const void * pView_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, pView{ pView_ }
{}
VULKAN_HPP_CONSTEXPR IOSSurfaceCreateInfoMVK( IOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default;
IOSSurfaceCreateInfoMVK( VkIOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
: IOSSurfaceCreateInfoMVK( *reinterpret_cast<IOSSurfaceCreateInfoMVK const *>( &rhs ) )
{}
IOSSurfaceCreateInfoMVK & operator=( IOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
IOSSurfaceCreateInfoMVK & operator=( VkIOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 IOSSurfaceCreateInfoMVK & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 IOSSurfaceCreateInfoMVK & setFlags( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 IOSSurfaceCreateInfoMVK & setPView( const void * pView_ ) VULKAN_HPP_NOEXCEPT
{
pView = pView_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkIOSSurfaceCreateInfoMVK const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkIOSSurfaceCreateInfoMVK*>( this );
}
operator VkIOSSurfaceCreateInfoMVK &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkIOSSurfaceCreateInfoMVK*>( this );
}
operator VkIOSSurfaceCreateInfoMVK const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkIOSSurfaceCreateInfoMVK*>( this );
}
operator VkIOSSurfaceCreateInfoMVK *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkIOSSurfaceCreateInfoMVK*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK const &, const void * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, pView );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( IOSSurfaceCreateInfoMVK const & ) const = default;
#else
bool operator==( IOSSurfaceCreateInfoMVK const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( pView == rhs.pView );
#endif
}
bool operator!=( IOSSurfaceCreateInfoMVK const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIosSurfaceCreateInfoMVK;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags = {};
const void * pView = {};
};
template <>
struct CppType<StructureType, StructureType::eIosSurfaceCreateInfoMVK>
{
using Type = IOSSurfaceCreateInfoMVK;
};
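// Editorial usage sketch (not generated code): with MoltenVK's VK_MVK_ios_surface (now
// deprecated in favor of VK_EXT_metal_surface), pView references a CAMetalLayer or a UIView
// backed by one; `view` and `instance` are hypothetical:
//
//   vk::IOSSurfaceCreateInfoMVK createInfo{};
//   createInfo.setPView( view );
//   vk::SurfaceKHR surface = instance.createIOSSurfaceMVK( createInfo );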
#endif /*VK_USE_PLATFORM_IOS_MVK*/
// wrapper struct for struct VkImageAlignmentControlCreateInfoMESA, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageAlignmentControlCreateInfoMESA.html
struct ImageAlignmentControlCreateInfoMESA
{
using NativeType = VkImageAlignmentControlCreateInfoMESA;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageAlignmentControlCreateInfoMESA;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImageAlignmentControlCreateInfoMESA(uint32_t maximumRequestedAlignment_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, maximumRequestedAlignment{ maximumRequestedAlignment_ }
{}
VULKAN_HPP_CONSTEXPR ImageAlignmentControlCreateInfoMESA( ImageAlignmentControlCreateInfoMESA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageAlignmentControlCreateInfoMESA( VkImageAlignmentControlCreateInfoMESA const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageAlignmentControlCreateInfoMESA( *reinterpret_cast<ImageAlignmentControlCreateInfoMESA const *>( &rhs ) )
{}
ImageAlignmentControlCreateInfoMESA & operator=( ImageAlignmentControlCreateInfoMESA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ImageAlignmentControlCreateInfoMESA & operator=( VkImageAlignmentControlCreateInfoMESA const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageAlignmentControlCreateInfoMESA const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ImageAlignmentControlCreateInfoMESA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageAlignmentControlCreateInfoMESA & setMaximumRequestedAlignment( uint32_t maximumRequestedAlignment_ ) VULKAN_HPP_NOEXCEPT
{
maximumRequestedAlignment = maximumRequestedAlignment_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkImageAlignmentControlCreateInfoMESA const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageAlignmentControlCreateInfoMESA*>( this );
}
operator VkImageAlignmentControlCreateInfoMESA &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageAlignmentControlCreateInfoMESA*>( this );
}
operator VkImageAlignmentControlCreateInfoMESA const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkImageAlignmentControlCreateInfoMESA*>( this );
}
operator VkImageAlignmentControlCreateInfoMESA *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkImageAlignmentControlCreateInfoMESA*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, maximumRequestedAlignment );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImageAlignmentControlCreateInfoMESA const & ) const = default;
#else
bool operator==( ImageAlignmentControlCreateInfoMESA const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( maximumRequestedAlignment == rhs.maximumRequestedAlignment );
#endif
}
bool operator!=( ImageAlignmentControlCreateInfoMESA const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageAlignmentControlCreateInfoMESA;
const void * pNext = {};
uint32_t maximumRequestedAlignment = {};
};
template <>
struct CppType<StructureType, StructureType::eImageAlignmentControlCreateInfoMESA>
{
using Type = ImageAlignmentControlCreateInfoMESA;
};
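// Editorial usage sketch (not generated code): per VK_MESA_image_alignment_control, chaining
// this struct into vk::ImageCreateInfo asks the implementation to choose an image memory
// alignment no larger than the given power of two (supported values are advertised via
// PhysicalDeviceImageAlignmentControlPropertiesMESA); the 4096 below is illustrative:
//
//   vk::ImageAlignmentControlCreateInfoMESA alignmentControl( 4096 );
//   vk::ImageCreateInfo imageCreateInfo{};   // remaining creation fields elided
//   imageCreateInfo.setPNext( &alignmentControl );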
// wrapper struct for struct VkImageBlit, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageBlit.html
struct ImageBlit
{
using NativeType = VkImageBlit;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 ImageBlit(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const & srcOffsets_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const & dstOffsets_ = {}) VULKAN_HPP_NOEXCEPT
: srcSubresource{ srcSubresource_ }, srcOffsets{ srcOffsets_ }, dstSubresource{ dstSubresource_ }, dstOffsets{ dstOffsets_ }
{}
VULKAN_HPP_CONSTEXPR_14 ImageBlit( ImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageBlit( VkImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageBlit( *reinterpret_cast<ImageBlit const *>( &rhs ) )
{}
ImageBlit & operator=( ImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ImageBlit & operator=( VkImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageBlit const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ImageBlit & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
{
srcSubresource = srcSubresource_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageBlit & setSrcOffsets( std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const & srcOffsets_ ) VULKAN_HPP_NOEXCEPT
{
srcOffsets = srcOffsets_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageBlit & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
{
dstSubresource = dstSubresource_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageBlit & setDstOffsets( std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const & dstOffsets_ ) VULKAN_HPP_NOEXCEPT
{
dstOffsets = dstOffsets_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkImageBlit const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageBlit*>( this );
}
operator VkImageBlit &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageBlit*>( this );
}
operator VkImageBlit const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkImageBlit*>( this );
}
operator VkImageBlit *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkImageBlit*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::Offset3D, 2> const &, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::Offset3D, 2> const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( srcSubresource, srcOffsets, dstSubresource, dstOffsets );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImageBlit const & ) const = default;
#else
bool operator==( ImageBlit const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( srcSubresource == rhs.srcSubresource )
&& ( srcOffsets == rhs.srcOffsets )
&& ( dstSubresource == rhs.dstSubresource )
&& ( dstOffsets == rhs.dstOffsets );
#endif
}
bool operator!=( ImageBlit const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::Offset3D, 2> srcOffsets = {};
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::Offset3D, 2> dstOffsets = {};
};
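// Editorial usage sketch (not generated code): an ImageBlit names source and destination
// regions by corner offsets; here mip level 0 (256x256) of a hypothetical `image` is blitted
// to its own mip level 1 (128x128), a common mipmap-generation step:
//
//   vk::ImageBlit blit{};
//   blit.setSrcSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
//       .setSrcOffsets( { { { 0, 0, 0 }, { 256, 256, 1 } } } )
//       .setDstSubresource( { vk::ImageAspectFlagBits::eColor, 1, 0, 1 } )
//       .setDstOffsets( { { { 0, 0, 0 }, { 128, 128, 1 } } } );
//   commandBuffer.blitImage( image, vk::ImageLayout::eTransferSrcOptimal,
//                            image, vk::ImageLayout::eTransferDstOptimal,
//                            blit, vk::Filter::eLinear );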
// wrapper struct for struct VkImageCaptureDescriptorDataInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageCaptureDescriptorDataInfoEXT.html
struct ImageCaptureDescriptorDataInfoEXT
{
using NativeType = VkImageCaptureDescriptorDataInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageCaptureDescriptorDataInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImageCaptureDescriptorDataInfoEXT(VULKAN_HPP_NAMESPACE::Image image_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, image{ image_ }
{}
VULKAN_HPP_CONSTEXPR ImageCaptureDescriptorDataInfoEXT( ImageCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageCaptureDescriptorDataInfoEXT( VkImageCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageCaptureDescriptorDataInfoEXT( *reinterpret_cast<ImageCaptureDescriptorDataInfoEXT const *>( &rhs ) )
{}
ImageCaptureDescriptorDataInfoEXT & operator=( ImageCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ImageCaptureDescriptorDataInfoEXT & operator=( VkImageCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ImageCaptureDescriptorDataInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageCaptureDescriptorDataInfoEXT & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
{
image = image_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkImageCaptureDescriptorDataInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageCaptureDescriptorDataInfoEXT*>( this );
}
operator VkImageCaptureDescriptorDataInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageCaptureDescriptorDataInfoEXT*>( this );
}
operator VkImageCaptureDescriptorDataInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkImageCaptureDescriptorDataInfoEXT*>( this );
}
operator VkImageCaptureDescriptorDataInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkImageCaptureDescriptorDataInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Image const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, image );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImageCaptureDescriptorDataInfoEXT const & ) const = default;
#else
bool operator==( ImageCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( image == rhs.image );
#endif
}
bool operator!=( ImageCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageCaptureDescriptorDataInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Image image = {};
};
template <>
struct CppType<StructureType, StructureType::eImageCaptureDescriptorDataInfoEXT>
{
using Type = ImageCaptureDescriptorDataInfoEXT;
};
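// Editorial usage sketch (not generated code): with VK_EXT_descriptor_buffer's
// capture/replay feature, this struct identifies the image whose opaque capture data is
// queried; the pointer conversions above let it feed the C entry point directly.
// `device` and `image` are hypothetical, and `captureDataSize` would come from
// PhysicalDeviceDescriptorBufferPropertiesEXT::imageCaptureReplayDescriptorDataSize:
//
//   vk::ImageCaptureDescriptorDataInfoEXT info{};
//   info.setImage( image );
//   std::vector<std::byte> captureData( captureDataSize );
//   vkGetImageOpaqueCaptureDescriptorDataEXT( device, info, captureData.data() );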
// wrapper struct for struct VkImageCompressionControlEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageCompressionControlEXT.html
struct ImageCompressionControlEXT
{
using NativeType = VkImageCompressionControlEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageCompressionControlEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImageCompressionControlEXT(VULKAN_HPP_NAMESPACE::ImageCompressionFlagsEXT flags_ = {}, uint32_t compressionControlPlaneCount_ = {}, VULKAN_HPP_NAMESPACE::ImageCompressionFixedRateFlagsEXT * pFixedRateFlags_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, compressionControlPlaneCount{ compressionControlPlaneCount_ }, pFixedRateFlags{ pFixedRateFlags_ }
{}
VULKAN_HPP_CONSTEXPR ImageCompressionControlEXT( ImageCompressionControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageCompressionControlEXT( VkImageCompressionControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageCompressionControlEXT( *reinterpret_cast<ImageCompressionControlEXT const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
ImageCompressionControlEXT( VULKAN_HPP_NAMESPACE::ImageCompressionFlagsEXT flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::ImageCompressionFixedRateFlagsEXT> const & fixedRateFlags_, const void * pNext_ = nullptr )
: pNext( pNext_ ), flags( flags_ ), compressionControlPlaneCount( static_cast<uint32_t>( fixedRateFlags_.size() ) ), pFixedRateFlags( fixedRateFlags_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
ImageCompressionControlEXT & operator=( ImageCompressionControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ImageCompressionControlEXT & operator=( VkImageCompressionControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageCompressionControlEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ImageCompressionControlEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageCompressionControlEXT & setFlags( VULKAN_HPP_NAMESPACE::ImageCompressionFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageCompressionControlEXT & setCompressionControlPlaneCount( uint32_t compressionControlPlaneCount_ ) VULKAN_HPP_NOEXCEPT
{
compressionControlPlaneCount = compressionControlPlaneCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageCompressionControlEXT & setPFixedRateFlags( VULKAN_HPP_NAMESPACE::ImageCompressionFixedRateFlagsEXT * pFixedRateFlags_ ) VULKAN_HPP_NOEXCEPT
{
pFixedRateFlags = pFixedRateFlags_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
ImageCompressionControlEXT & setFixedRateFlags( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::ImageCompressionFixedRateFlagsEXT> const & fixedRateFlags_ ) VULKAN_HPP_NOEXCEPT
{
compressionControlPlaneCount = static_cast<uint32_t>( fixedRateFlags_.size() );
pFixedRateFlags = fixedRateFlags_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkImageCompressionControlEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageCompressionControlEXT*>( this );
}
operator VkImageCompressionControlEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageCompressionControlEXT*>( this );
}
operator VkImageCompressionControlEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkImageCompressionControlEXT*>( this );
}
operator VkImageCompressionControlEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkImageCompressionControlEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageCompressionFlagsEXT const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ImageCompressionFixedRateFlagsEXT * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, compressionControlPlaneCount, pFixedRateFlags );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImageCompressionControlEXT const & ) const = default;
#else
bool operator==( ImageCompressionControlEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( compressionControlPlaneCount == rhs.compressionControlPlaneCount )
&& ( pFixedRateFlags == rhs.pFixedRateFlags );
#endif
}
bool operator!=( ImageCompressionControlEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageCompressionControlEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ImageCompressionFlagsEXT flags = {};
uint32_t compressionControlPlaneCount = {};
VULKAN_HPP_NAMESPACE::ImageCompressionFixedRateFlagsEXT * pFixedRateFlags = {};
};
template <>
struct CppType<StructureType, StructureType::eImageCompressionControlEXT>
{
using Type = ImageCompressionControlEXT;
};
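// Editorial usage sketch (not generated code): with VK_EXT_image_compression_control,
// chaining this struct into vk::ImageCreateInfo requests explicit fixed-rate compression;
// the ArrayProxy constructor above also accepts a single lvalue for the per-plane rates:
//
//   vk::ImageCompressionFixedRateFlagsEXT rate = vk::ImageCompressionFixedRateFlagBitsEXT::e4Bpc;
//   vk::ImageCompressionControlEXT control( vk::ImageCompressionFlagBitsEXT::eFixedRateExplicit, rate );
//   vk::ImageCreateInfo imageCreateInfo{};   // remaining creation fields elided
//   imageCreateInfo.setPNext( &control );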
// wrapper struct for struct VkImageCompressionPropertiesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageCompressionPropertiesEXT.html
struct ImageCompressionPropertiesEXT
{
using NativeType = VkImageCompressionPropertiesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageCompressionPropertiesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImageCompressionPropertiesEXT(VULKAN_HPP_NAMESPACE::ImageCompressionFlagsEXT imageCompressionFlags_ = {}, VULKAN_HPP_NAMESPACE::ImageCompressionFixedRateFlagsEXT imageCompressionFixedRateFlags_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, imageCompressionFlags{ imageCompressionFlags_ }, imageCompressionFixedRateFlags{ imageCompressionFixedRateFlags_ }
{}
VULKAN_HPP_CONSTEXPR ImageCompressionPropertiesEXT( ImageCompressionPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageCompressionPropertiesEXT( VkImageCompressionPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageCompressionPropertiesEXT( *reinterpret_cast<ImageCompressionPropertiesEXT const *>( &rhs ) )
{}
ImageCompressionPropertiesEXT & operator=( ImageCompressionPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ImageCompressionPropertiesEXT & operator=( VkImageCompressionPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageCompressionPropertiesEXT const *>( &rhs );
return *this;
}
operator VkImageCompressionPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageCompressionPropertiesEXT*>( this );
}
operator VkImageCompressionPropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageCompressionPropertiesEXT*>( this );
}
operator VkImageCompressionPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkImageCompressionPropertiesEXT*>( this );
}
operator VkImageCompressionPropertiesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkImageCompressionPropertiesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ImageCompressionFlagsEXT const &, VULKAN_HPP_NAMESPACE::ImageCompressionFixedRateFlagsEXT const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, imageCompressionFlags, imageCompressionFixedRateFlags );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImageCompressionPropertiesEXT const & ) const = default;
#else
bool operator==( ImageCompressionPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( imageCompressionFlags == rhs.imageCompressionFlags )
&& ( imageCompressionFixedRateFlags == rhs.imageCompressionFixedRateFlags );
#endif
}
bool operator!=( ImageCompressionPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageCompressionPropertiesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::ImageCompressionFlagsEXT imageCompressionFlags = {};
VULKAN_HPP_NAMESPACE::ImageCompressionFixedRateFlagsEXT imageCompressionFixedRateFlags = {};
};
template <>
struct CppType<StructureType, StructureType::eImageCompressionPropertiesEXT>
{
using Type = ImageCompressionPropertiesEXT;
};
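// Non-normative usage sketch (not part of the generated API): ImageCompressionPropertiesEXT is a
// returned-only struct; append it to the pNext chain of an ImageFormatProperties2 query to learn
// which compression an implementation would apply. "physicalDevice" is an assumed vk::PhysicalDevice.
//
//   vk::PhysicalDeviceImageFormatInfo2 formatInfo{};  // fill in format, type, tiling, usage
//   auto chain = physicalDevice.getImageFormatProperties2<
//     vk::ImageFormatProperties2, vk::ImageCompressionPropertiesEXT>( formatInfo );
//   vk::ImageCompressionFlagsEXT applied = chain.get<vk::ImageCompressionPropertiesEXT>().imageCompressionFlags;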
#if defined( VK_USE_PLATFORM_FUCHSIA )
// wrapper struct for struct VkImageFormatConstraintsInfoFUCHSIA, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageFormatConstraintsInfoFUCHSIA.html
struct ImageFormatConstraintsInfoFUCHSIA
{
using NativeType = VkImageFormatConstraintsInfoFUCHSIA;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageFormatConstraintsInfoFUCHSIA;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImageFormatConstraintsInfoFUCHSIA(VULKAN_HPP_NAMESPACE::ImageCreateInfo imageCreateInfo_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures_ = {}, VULKAN_HPP_NAMESPACE::ImageFormatConstraintsFlagsFUCHSIA flags_ = {}, uint64_t sysmemPixelFormat_ = {}, uint32_t colorSpaceCount_ = {}, const VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA * pColorSpaces_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, imageCreateInfo{ imageCreateInfo_ }, requiredFormatFeatures{ requiredFormatFeatures_ }, flags{ flags_ }, sysmemPixelFormat{ sysmemPixelFormat_ }, colorSpaceCount{ colorSpaceCount_ }, pColorSpaces{ pColorSpaces_ }
{}
VULKAN_HPP_CONSTEXPR ImageFormatConstraintsInfoFUCHSIA( ImageFormatConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageFormatConstraintsInfoFUCHSIA( VkImageFormatConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageFormatConstraintsInfoFUCHSIA( *reinterpret_cast<ImageFormatConstraintsInfoFUCHSIA const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
ImageFormatConstraintsInfoFUCHSIA( VULKAN_HPP_NAMESPACE::ImageCreateInfo imageCreateInfo_, VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures_, VULKAN_HPP_NAMESPACE::ImageFormatConstraintsFlagsFUCHSIA flags_, uint64_t sysmemPixelFormat_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA> const & colorSpaces_, const void * pNext_ = nullptr )
: pNext( pNext_ ), imageCreateInfo( imageCreateInfo_ ), requiredFormatFeatures( requiredFormatFeatures_ ), flags( flags_ ), sysmemPixelFormat( sysmemPixelFormat_ ), colorSpaceCount( static_cast<uint32_t>( colorSpaces_.size() ) ), pColorSpaces( colorSpaces_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
ImageFormatConstraintsInfoFUCHSIA & operator=( ImageFormatConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ImageFormatConstraintsInfoFUCHSIA & operator=( VkImageFormatConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & setImageCreateInfo( VULKAN_HPP_NAMESPACE::ImageCreateInfo const & imageCreateInfo_ ) VULKAN_HPP_NOEXCEPT
{
imageCreateInfo = imageCreateInfo_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & setRequiredFormatFeatures( VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures_ ) VULKAN_HPP_NOEXCEPT
{
requiredFormatFeatures = requiredFormatFeatures_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & setFlags( VULKAN_HPP_NAMESPACE::ImageFormatConstraintsFlagsFUCHSIA flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & setSysmemPixelFormat( uint64_t sysmemPixelFormat_ ) VULKAN_HPP_NOEXCEPT
{
sysmemPixelFormat = sysmemPixelFormat_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & setColorSpaceCount( uint32_t colorSpaceCount_ ) VULKAN_HPP_NOEXCEPT
{
colorSpaceCount = colorSpaceCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & setPColorSpaces( const VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA * pColorSpaces_ ) VULKAN_HPP_NOEXCEPT
{
pColorSpaces = pColorSpaces_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
ImageFormatConstraintsInfoFUCHSIA & setColorSpaces( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA> const & colorSpaces_ ) VULKAN_HPP_NOEXCEPT
{
colorSpaceCount = static_cast<uint32_t>( colorSpaces_.size() );
pColorSpaces = colorSpaces_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkImageFormatConstraintsInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageFormatConstraintsInfoFUCHSIA*>( this );
}
operator VkImageFormatConstraintsInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageFormatConstraintsInfoFUCHSIA*>( this );
}
operator VkImageFormatConstraintsInfoFUCHSIA const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkImageFormatConstraintsInfoFUCHSIA*>( this );
}
operator VkImageFormatConstraintsInfoFUCHSIA *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkImageFormatConstraintsInfoFUCHSIA*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageCreateInfo const &, VULKAN_HPP_NAMESPACE::FormatFeatureFlags const &, VULKAN_HPP_NAMESPACE::ImageFormatConstraintsFlagsFUCHSIA const &, uint64_t const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, imageCreateInfo, requiredFormatFeatures, flags, sysmemPixelFormat, colorSpaceCount, pColorSpaces );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImageFormatConstraintsInfoFUCHSIA const & ) const = default;
#else
bool operator==( ImageFormatConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( imageCreateInfo == rhs.imageCreateInfo )
&& ( requiredFormatFeatures == rhs.requiredFormatFeatures )
&& ( flags == rhs.flags )
&& ( sysmemPixelFormat == rhs.sysmemPixelFormat )
&& ( colorSpaceCount == rhs.colorSpaceCount )
&& ( pColorSpaces == rhs.pColorSpaces );
#endif
}
bool operator!=( ImageFormatConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageFormatConstraintsInfoFUCHSIA;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ImageCreateInfo imageCreateInfo = {};
VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures = {};
VULKAN_HPP_NAMESPACE::ImageFormatConstraintsFlagsFUCHSIA flags = {};
uint64_t sysmemPixelFormat = {};
uint32_t colorSpaceCount = {};
const VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA * pColorSpaces = {};
};
template <>
struct CppType<StructureType, StructureType::eImageFormatConstraintsInfoFUCHSIA>
{
using Type = ImageFormatConstraintsInfoFUCHSIA;
};
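// Non-normative usage sketch (not part of the generated API): describing one acceptable image
// format for a Fuchsia sysmem buffer collection; the ArrayProxy setter fills both colorSpaceCount
// and pColorSpaces from a container. "imageCreateInfo" is an assumed, filled-in vk::ImageCreateInfo.
//
//   std::array<vk::SysmemColorSpaceFUCHSIA, 1> colorSpaces = { vk::SysmemColorSpaceFUCHSIA{} };
//   vk::ImageFormatConstraintsInfoFUCHSIA formatConstraints{};
//   formatConstraints.setImageCreateInfo( imageCreateInfo )
//                    .setRequiredFormatFeatures( vk::FormatFeatureFlagBits::eSampledImage )
//                    .setColorSpaces( colorSpaces );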
#endif /*VK_USE_PLATFORM_FUCHSIA*/
#if defined( VK_USE_PLATFORM_FUCHSIA )
// wrapper struct for struct VkImageConstraintsInfoFUCHSIA, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageConstraintsInfoFUCHSIA.html
struct ImageConstraintsInfoFUCHSIA
{
using NativeType = VkImageConstraintsInfoFUCHSIA;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageConstraintsInfoFUCHSIA;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImageConstraintsInfoFUCHSIA(uint32_t formatConstraintsCount_ = {}, const VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA * pFormatConstraints_ = {}, VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints_ = {}, VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFlagsFUCHSIA flags_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, formatConstraintsCount{ formatConstraintsCount_ }, pFormatConstraints{ pFormatConstraints_ }, bufferCollectionConstraints{ bufferCollectionConstraints_ }, flags{ flags_ }
{}
VULKAN_HPP_CONSTEXPR ImageConstraintsInfoFUCHSIA( ImageConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageConstraintsInfoFUCHSIA( VkImageConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageConstraintsInfoFUCHSIA( *reinterpret_cast<ImageConstraintsInfoFUCHSIA const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
ImageConstraintsInfoFUCHSIA( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA> const & formatConstraints_, VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints_ = {}, VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFlagsFUCHSIA flags_ = {}, const void * pNext_ = nullptr )
: pNext( pNext_ ), formatConstraintsCount( static_cast<uint32_t>( formatConstraints_.size() ) ), pFormatConstraints( formatConstraints_.data() ), bufferCollectionConstraints( bufferCollectionConstraints_ ), flags( flags_ )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
ImageConstraintsInfoFUCHSIA & operator=( ImageConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ImageConstraintsInfoFUCHSIA & operator=( VkImageConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA & setFormatConstraintsCount( uint32_t formatConstraintsCount_ ) VULKAN_HPP_NOEXCEPT
{
formatConstraintsCount = formatConstraintsCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA & setPFormatConstraints( const VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA * pFormatConstraints_ ) VULKAN_HPP_NOEXCEPT
{
pFormatConstraints = pFormatConstraints_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
ImageConstraintsInfoFUCHSIA & setFormatConstraints( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA> const & formatConstraints_ ) VULKAN_HPP_NOEXCEPT
{
formatConstraintsCount = static_cast<uint32_t>( formatConstraints_.size() );
pFormatConstraints = formatConstraints_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA & setBufferCollectionConstraints( VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA const & bufferCollectionConstraints_ ) VULKAN_HPP_NOEXCEPT
{
bufferCollectionConstraints = bufferCollectionConstraints_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA & setFlags( VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFlagsFUCHSIA flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkImageConstraintsInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageConstraintsInfoFUCHSIA*>( this );
}
operator VkImageConstraintsInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageConstraintsInfoFUCHSIA*>( this );
}
operator VkImageConstraintsInfoFUCHSIA const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkImageConstraintsInfoFUCHSIA*>( this );
}
operator VkImageConstraintsInfoFUCHSIA *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkImageConstraintsInfoFUCHSIA*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA * const &, VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA const &, VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFlagsFUCHSIA const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, formatConstraintsCount, pFormatConstraints, bufferCollectionConstraints, flags );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImageConstraintsInfoFUCHSIA const & ) const = default;
#else
bool operator==( ImageConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( formatConstraintsCount == rhs.formatConstraintsCount )
&& ( pFormatConstraints == rhs.pFormatConstraints )
&& ( bufferCollectionConstraints == rhs.bufferCollectionConstraints )
&& ( flags == rhs.flags );
#endif
}
bool operator!=( ImageConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageConstraintsInfoFUCHSIA;
const void * pNext = {};
uint32_t formatConstraintsCount = {};
const VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA * pFormatConstraints = {};
VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints = {};
VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFlagsFUCHSIA flags = {};
};
template <>
struct CppType<StructureType, StructureType::eImageConstraintsInfoFUCHSIA>
{
using Type = ImageConstraintsInfoFUCHSIA;
};
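// Non-normative usage sketch (not part of the generated API): bundling the per-format constraints
// from the previous sketch and handing them to a buffer collection. "device", "bufferCollection",
// and "formatConstraints" are assumed handles/structs from VK_FUCHSIA_buffer_collection usage.
//
//   vk::ImageConstraintsInfoFUCHSIA constraintsInfo{};
//   constraintsInfo.setFormatConstraints( formatConstraints )  // single element via ArrayProxy
//                  .setBufferCollectionConstraints( vk::BufferCollectionConstraintsInfoFUCHSIA{} );
//   device.setBufferCollectionImageConstraintsFUCHSIA( bufferCollection, constraintsInfo );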
#endif /*VK_USE_PLATFORM_FUCHSIA*/
// wrapper struct for struct VkImageCopy, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageCopy.html
struct ImageCopy
{
using NativeType = VkImageCopy;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImageCopy(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}) VULKAN_HPP_NOEXCEPT
: srcSubresource{ srcSubresource_ }, srcOffset{ srcOffset_ }, dstSubresource{ dstSubresource_ }, dstOffset{ dstOffset_ }, extent{ extent_ }
{}
VULKAN_HPP_CONSTEXPR ImageCopy( ImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageCopy( VkImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageCopy( *reinterpret_cast<ImageCopy const *>( &rhs ) )
{}
ImageCopy & operator=( ImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ImageCopy & operator=( VkImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageCopy const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ImageCopy & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
{
srcSubresource = srcSubresource_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageCopy & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT
{
srcOffset = srcOffset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageCopy & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
{
dstSubresource = dstSubresource_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageCopy & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT
{
dstOffset = dstOffset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageCopy & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
{
extent = extent_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkImageCopy const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageCopy*>( this );
}
operator VkImageCopy &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageCopy*>( this );
}
operator VkImageCopy const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkImageCopy*>( this );
}
operator VkImageCopy *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkImageCopy*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &, VULKAN_HPP_NAMESPACE::Offset3D const &, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &, VULKAN_HPP_NAMESPACE::Offset3D const &, VULKAN_HPP_NAMESPACE::Extent3D const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( srcSubresource, srcOffset, dstSubresource, dstOffset, extent );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImageCopy const & ) const = default;
#else
bool operator==( ImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( srcSubresource == rhs.srcSubresource )
&& ( srcOffset == rhs.srcOffset )
&& ( dstSubresource == rhs.dstSubresource )
&& ( dstOffset == rhs.dstOffset )
&& ( extent == rhs.extent );
#endif
}
bool operator!=( ImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {};
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {};
VULKAN_HPP_NAMESPACE::Extent3D extent = {};
};
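// Non-normative usage sketch (not part of the generated API): copying one color mip level between
// two images that are already in the matching transfer layouts. "commandBuffer", "srcImage",
// "dstImage", "width", and "height" are assumed to exist.
//
//   vk::ImageCopy region{};
//   region.setSrcSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
//         .setDstSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
//         .setExtent( { width, height, 1 } );
//   commandBuffer.copyImage( srcImage, vk::ImageLayout::eTransferSrcOptimal,
//                            dstImage, vk::ImageLayout::eTransferDstOptimal, region );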
// wrapper struct for struct VkSubresourceLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSubresourceLayout.html
struct SubresourceLayout
{
using NativeType = VkSubresourceLayout;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SubresourceLayout(VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize rowPitch_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize arrayPitch_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize depthPitch_ = {}) VULKAN_HPP_NOEXCEPT
: offset{ offset_ }, size{ size_ }, rowPitch{ rowPitch_ }, arrayPitch{ arrayPitch_ }, depthPitch{ depthPitch_ }
{}
VULKAN_HPP_CONSTEXPR SubresourceLayout( SubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SubresourceLayout( VkSubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT
: SubresourceLayout( *reinterpret_cast<SubresourceLayout const *>( &rhs ) )
{}
SubresourceLayout & operator=( SubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SubresourceLayout & operator=( VkSubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubresourceLayout const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 SubresourceLayout & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
{
offset = offset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SubresourceLayout & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
{
size = size_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SubresourceLayout & setRowPitch( VULKAN_HPP_NAMESPACE::DeviceSize rowPitch_ ) VULKAN_HPP_NOEXCEPT
{
rowPitch = rowPitch_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SubresourceLayout & setArrayPitch( VULKAN_HPP_NAMESPACE::DeviceSize arrayPitch_ ) VULKAN_HPP_NOEXCEPT
{
arrayPitch = arrayPitch_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SubresourceLayout & setDepthPitch( VULKAN_HPP_NAMESPACE::DeviceSize depthPitch_ ) VULKAN_HPP_NOEXCEPT
{
depthPitch = depthPitch_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkSubresourceLayout const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSubresourceLayout*>( this );
}
operator VkSubresourceLayout &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSubresourceLayout*>( this );
}
operator VkSubresourceLayout const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSubresourceLayout*>( this );
}
operator VkSubresourceLayout *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSubresourceLayout*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( offset, size, rowPitch, arrayPitch, depthPitch );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SubresourceLayout const & ) const = default;
#else
bool operator==( SubresourceLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( offset == rhs.offset )
&& ( size == rhs.size )
&& ( rowPitch == rhs.rowPitch )
&& ( arrayPitch == rhs.arrayPitch )
&& ( depthPitch == rhs.depthPitch );
#endif
}
bool operator!=( SubresourceLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
VULKAN_HPP_NAMESPACE::DeviceSize size = {};
VULKAN_HPP_NAMESPACE::DeviceSize rowPitch = {};
VULKAN_HPP_NAMESPACE::DeviceSize arrayPitch = {};
VULKAN_HPP_NAMESPACE::DeviceSize depthPitch = {};
};
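// Non-normative usage sketch (not part of the generated API): for a linear-tiled image bound to
// host-visible memory, SubresourceLayout describes how to address texels through a mapped pointer.
// "device", "image", and "mappedBase" are assumed to exist.
//
//   vk::ImageSubresource subresource{ vk::ImageAspectFlagBits::eColor, 0, 0 };
//   vk::SubresourceLayout layout = device.getImageSubresourceLayout( image, subresource );
//   // row y of the mip level starts at mappedBase + layout.offset + y * layout.rowPitch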
// wrapper struct for struct VkImageDrmFormatModifierExplicitCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageDrmFormatModifierExplicitCreateInfoEXT.html
struct ImageDrmFormatModifierExplicitCreateInfoEXT
{
using NativeType = VkImageDrmFormatModifierExplicitCreateInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierExplicitCreateInfoEXT(uint64_t drmFormatModifier_ = {}, uint32_t drmFormatModifierPlaneCount_ = {}, const VULKAN_HPP_NAMESPACE::SubresourceLayout * pPlaneLayouts_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, drmFormatModifier{ drmFormatModifier_ }, drmFormatModifierPlaneCount{ drmFormatModifierPlaneCount_ }, pPlaneLayouts{ pPlaneLayouts_ }
{}
VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierExplicitCreateInfoEXT( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageDrmFormatModifierExplicitCreateInfoEXT( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageDrmFormatModifierExplicitCreateInfoEXT( *reinterpret_cast<ImageDrmFormatModifierExplicitCreateInfoEXT const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
ImageDrmFormatModifierExplicitCreateInfoEXT( uint64_t drmFormatModifier_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubresourceLayout> const & planeLayouts_, const void * pNext_ = nullptr )
: pNext( pNext_ ), drmFormatModifier( drmFormatModifier_ ), drmFormatModifierPlaneCount( static_cast<uint32_t>( planeLayouts_.size() ) ), pPlaneLayouts( planeLayouts_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
ImageDrmFormatModifierExplicitCreateInfoEXT & operator=( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ImageDrmFormatModifierExplicitCreateInfoEXT & operator=( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierExplicitCreateInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierExplicitCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierExplicitCreateInfoEXT & setDrmFormatModifier( uint64_t drmFormatModifier_ ) VULKAN_HPP_NOEXCEPT
{
drmFormatModifier = drmFormatModifier_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierExplicitCreateInfoEXT & setDrmFormatModifierPlaneCount( uint32_t drmFormatModifierPlaneCount_ ) VULKAN_HPP_NOEXCEPT
{
drmFormatModifierPlaneCount = drmFormatModifierPlaneCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierExplicitCreateInfoEXT & setPPlaneLayouts( const VULKAN_HPP_NAMESPACE::SubresourceLayout * pPlaneLayouts_ ) VULKAN_HPP_NOEXCEPT
{
pPlaneLayouts = pPlaneLayouts_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
ImageDrmFormatModifierExplicitCreateInfoEXT & setPlaneLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubresourceLayout> const & planeLayouts_ ) VULKAN_HPP_NOEXCEPT
{
drmFormatModifierPlaneCount = static_cast<uint32_t>( planeLayouts_.size() );
pPlaneLayouts = planeLayouts_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkImageDrmFormatModifierExplicitCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageDrmFormatModifierExplicitCreateInfoEXT*>( this );
}
operator VkImageDrmFormatModifierExplicitCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageDrmFormatModifierExplicitCreateInfoEXT*>( this );
}
operator VkImageDrmFormatModifierExplicitCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkImageDrmFormatModifierExplicitCreateInfoEXT*>( this );
}
operator VkImageDrmFormatModifierExplicitCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkImageDrmFormatModifierExplicitCreateInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint64_t const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::SubresourceLayout * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, drmFormatModifier, drmFormatModifierPlaneCount, pPlaneLayouts );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImageDrmFormatModifierExplicitCreateInfoEXT const & ) const = default;
#else
bool operator==( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( drmFormatModifier == rhs.drmFormatModifier )
&& ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount )
&& ( pPlaneLayouts == rhs.pPlaneLayouts );
#endif
}
bool operator!=( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT;
const void * pNext = {};
uint64_t drmFormatModifier = {};
uint32_t drmFormatModifierPlaneCount = {};
const VULKAN_HPP_NAMESPACE::SubresourceLayout * pPlaneLayouts = {};
};
template <>
struct CppType<StructureType, StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT>
{
using Type = ImageDrmFormatModifierExplicitCreateInfoEXT;
};
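// Non-normative usage sketch (not part of the generated API): importing an image whose DRM format
// modifier and per-plane layouts were decided by an external allocator. "modifier" and "rowPitch"
// are assumed inputs; size, arrayPitch, and depthPitch stay zero as required for this struct.
//
//   std::array<vk::SubresourceLayout, 1> planeLayouts = { vk::SubresourceLayout{ 0, 0, rowPitch, 0, 0 } };
//   vk::ImageDrmFormatModifierExplicitCreateInfoEXT explicitInfo{};
//   explicitInfo.setDrmFormatModifier( modifier ).setPlaneLayouts( planeLayouts );
//   vk::ImageCreateInfo imageCreateInfo{};
//   imageCreateInfo.setTiling( vk::ImageTiling::eDrmFormatModifierEXT ).setPNext( &explicitInfo );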
// wrapper struct for struct VkImageDrmFormatModifierListCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageDrmFormatModifierListCreateInfoEXT.html
struct ImageDrmFormatModifierListCreateInfoEXT
{
using NativeType = VkImageDrmFormatModifierListCreateInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageDrmFormatModifierListCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierListCreateInfoEXT(uint32_t drmFormatModifierCount_ = {}, const uint64_t * pDrmFormatModifiers_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, drmFormatModifierCount{ drmFormatModifierCount_ }, pDrmFormatModifiers{ pDrmFormatModifiers_ }
{}
VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierListCreateInfoEXT( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageDrmFormatModifierListCreateInfoEXT( VkImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageDrmFormatModifierListCreateInfoEXT( *reinterpret_cast<ImageDrmFormatModifierListCreateInfoEXT const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
ImageDrmFormatModifierListCreateInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & drmFormatModifiers_, const void * pNext_ = nullptr )
: pNext( pNext_ ), drmFormatModifierCount( static_cast<uint32_t>( drmFormatModifiers_.size() ) ), pDrmFormatModifiers( drmFormatModifiers_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
ImageDrmFormatModifierListCreateInfoEXT & operator=( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ImageDrmFormatModifierListCreateInfoEXT & operator=( VkImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierListCreateInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierListCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierListCreateInfoEXT & setDrmFormatModifierCount( uint32_t drmFormatModifierCount_ ) VULKAN_HPP_NOEXCEPT
{
drmFormatModifierCount = drmFormatModifierCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierListCreateInfoEXT & setPDrmFormatModifiers( const uint64_t * pDrmFormatModifiers_ ) VULKAN_HPP_NOEXCEPT
{
pDrmFormatModifiers = pDrmFormatModifiers_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
ImageDrmFormatModifierListCreateInfoEXT & setDrmFormatModifiers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & drmFormatModifiers_ ) VULKAN_HPP_NOEXCEPT
{
drmFormatModifierCount = static_cast<uint32_t>( drmFormatModifiers_.size() );
pDrmFormatModifiers = drmFormatModifiers_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkImageDrmFormatModifierListCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageDrmFormatModifierListCreateInfoEXT*>( this );
}
operator VkImageDrmFormatModifierListCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageDrmFormatModifierListCreateInfoEXT*>( this );
}
operator VkImageDrmFormatModifierListCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkImageDrmFormatModifierListCreateInfoEXT*>( this );
}
operator VkImageDrmFormatModifierListCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkImageDrmFormatModifierListCreateInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const uint64_t * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, drmFormatModifierCount, pDrmFormatModifiers );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImageDrmFormatModifierListCreateInfoEXT const & ) const = default;
#else
bool operator==( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( drmFormatModifierCount == rhs.drmFormatModifierCount )
&& ( pDrmFormatModifiers == rhs.pDrmFormatModifiers );
#endif
}
bool operator!=( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierListCreateInfoEXT;
const void * pNext = {};
uint32_t drmFormatModifierCount = {};
const uint64_t * pDrmFormatModifiers = {};
};
template <>
struct CppType<StructureType, StructureType::eImageDrmFormatModifierListCreateInfoEXT>
{
using Type = ImageDrmFormatModifierListCreateInfoEXT;
};
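// Non-normative usage sketch (not part of the generated API): letting the implementation choose
// from a set of acceptable DRM format modifiers at image creation. "modifierA" and "modifierB"
// are assumed values from a prior DrmFormatModifierPropertiesListEXT query.
//
//   std::array<uint64_t, 2> modifiers = { modifierA, modifierB };
//   vk::ImageDrmFormatModifierListCreateInfoEXT modifierList{};
//   modifierList.setDrmFormatModifiers( modifiers );
//   vk::ImageCreateInfo imageCreateInfo{};
//   imageCreateInfo.setTiling( vk::ImageTiling::eDrmFormatModifierEXT ).setPNext( &modifierList );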
// wrapper struct for struct VkImageDrmFormatModifierPropertiesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageDrmFormatModifierPropertiesEXT.html
struct ImageDrmFormatModifierPropertiesEXT
{
using NativeType = VkImageDrmFormatModifierPropertiesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageDrmFormatModifierPropertiesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierPropertiesEXT(uint64_t drmFormatModifier_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, drmFormatModifier{ drmFormatModifier_ }
{}
VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierPropertiesEXT( ImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageDrmFormatModifierPropertiesEXT( VkImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageDrmFormatModifierPropertiesEXT( *reinterpret_cast<ImageDrmFormatModifierPropertiesEXT const *>( &rhs ) )
{}
ImageDrmFormatModifierPropertiesEXT & operator=( ImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ImageDrmFormatModifierPropertiesEXT & operator=( VkImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT const *>( &rhs );
return *this;
}
operator VkImageDrmFormatModifierPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageDrmFormatModifierPropertiesEXT*>( this );
}
operator VkImageDrmFormatModifierPropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT*>( this );
}
operator VkImageDrmFormatModifierPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkImageDrmFormatModifierPropertiesEXT*>( this );
}
operator VkImageDrmFormatModifierPropertiesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint64_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, drmFormatModifier );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImageDrmFormatModifierPropertiesEXT const & ) const = default;
#else
bool operator==( ImageDrmFormatModifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( drmFormatModifier == rhs.drmFormatModifier );
#endif
}
bool operator!=( ImageDrmFormatModifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierPropertiesEXT;
void * pNext = {};
uint64_t drmFormatModifier = {};
};
template <>
struct CppType<StructureType, StructureType::eImageDrmFormatModifierPropertiesEXT>
{
using Type = ImageDrmFormatModifierPropertiesEXT;
};
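// Non-normative usage sketch (not part of the generated API): after creating an image with
// vk::ImageTiling::eDrmFormatModifierEXT and a modifier list, reading back which modifier the
// implementation actually selected. "device" and "image" are assumed handles.
//
//   vk::ImageDrmFormatModifierPropertiesEXT modifierProperties =
//     device.getImageDrmFormatModifierPropertiesEXT( image );
//   uint64_t chosenModifier = modifierProperties.drmFormatModifier;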
// wrapper struct for struct VkImageFormatListCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageFormatListCreateInfo.html
struct ImageFormatListCreateInfo
{
using NativeType = VkImageFormatListCreateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageFormatListCreateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImageFormatListCreateInfo(uint32_t viewFormatCount_ = {}, const VULKAN_HPP_NAMESPACE::Format * pViewFormats_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, viewFormatCount{ viewFormatCount_ }, pViewFormats{ pViewFormats_ }
{}
VULKAN_HPP_CONSTEXPR ImageFormatListCreateInfo( ImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageFormatListCreateInfo( VkImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageFormatListCreateInfo( *reinterpret_cast<ImageFormatListCreateInfo const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
ImageFormatListCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & viewFormats_, const void * pNext_ = nullptr )
: pNext( pNext_ ), viewFormatCount( static_cast<uint32_t>( viewFormats_.size() ) ), pViewFormats( viewFormats_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
ImageFormatListCreateInfo & operator=( ImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ImageFormatListCreateInfo & operator=( VkImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ImageFormatListCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageFormatListCreateInfo & setViewFormatCount( uint32_t viewFormatCount_ ) VULKAN_HPP_NOEXCEPT
{
viewFormatCount = viewFormatCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageFormatListCreateInfo & setPViewFormats( const VULKAN_HPP_NAMESPACE::Format * pViewFormats_ ) VULKAN_HPP_NOEXCEPT
{
pViewFormats = pViewFormats_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
ImageFormatListCreateInfo & setViewFormats( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & viewFormats_ ) VULKAN_HPP_NOEXCEPT
{
viewFormatCount = static_cast<uint32_t>( viewFormats_.size() );
pViewFormats = viewFormats_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkImageFormatListCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageFormatListCreateInfo*>( this );
}
operator VkImageFormatListCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageFormatListCreateInfo*>( this );
}
operator VkImageFormatListCreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkImageFormatListCreateInfo*>( this );
}
operator VkImageFormatListCreateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkImageFormatListCreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Format * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, viewFormatCount, pViewFormats );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImageFormatListCreateInfo const & ) const = default;
#else
bool operator==( ImageFormatListCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( viewFormatCount == rhs.viewFormatCount )
&& ( pViewFormats == rhs.pViewFormats );
#endif
}
bool operator!=( ImageFormatListCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageFormatListCreateInfo;
const void * pNext = {};
uint32_t viewFormatCount = {};
const VULKAN_HPP_NAMESPACE::Format * pViewFormats = {};
};
template <>
struct CppType<StructureType, StructureType::eImageFormatListCreateInfo>
{
using Type = ImageFormatListCreateInfo;
};
using ImageFormatListCreateInfoKHR = ImageFormatListCreateInfo;
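// Non-normative usage sketch (not part of the generated API): declaring up front which view
// formats a mutable-format image will be used with, which lets implementations avoid
// worst-case layouts.
//
//   std::array<vk::Format, 2> viewFormats = { vk::Format::eR8G8B8A8Unorm, vk::Format::eR8G8B8A8Srgb };
//   vk::ImageFormatListCreateInfo formatList{};
//   formatList.setViewFormats( viewFormats );
//   vk::ImageCreateInfo imageCreateInfo{};
//   imageCreateInfo.setFlags( vk::ImageCreateFlagBits::eMutableFormat ).setPNext( &formatList );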
// wrapper struct for struct VkImageFormatProperties2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageFormatProperties2.html
struct ImageFormatProperties2
{
using NativeType = VkImageFormatProperties2;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageFormatProperties2;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImageFormatProperties2(VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, imageFormatProperties{ imageFormatProperties_ }
{}
VULKAN_HPP_CONSTEXPR ImageFormatProperties2( ImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageFormatProperties2( VkImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageFormatProperties2( *reinterpret_cast<ImageFormatProperties2 const *>( &rhs ) )
{}
ImageFormatProperties2 & operator=( ImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ImageFormatProperties2 & operator=( VkImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageFormatProperties2 const *>( &rhs );
return *this;
}
operator VkImageFormatProperties2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageFormatProperties2*>( this );
}
operator VkImageFormatProperties2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageFormatProperties2*>( this );
}
operator VkImageFormatProperties2 const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkImageFormatProperties2*>( this );
}
operator VkImageFormatProperties2 *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkImageFormatProperties2*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ImageFormatProperties const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, imageFormatProperties );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImageFormatProperties2 const & ) const = default;
#else
bool operator==( ImageFormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( imageFormatProperties == rhs.imageFormatProperties );
#endif
}
bool operator!=( ImageFormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageFormatProperties2;
void * pNext = {};
VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties = {};
};
template <>
struct CppType<StructureType, StructureType::eImageFormatProperties2>
{
using Type = ImageFormatProperties2;
};
using ImageFormatProperties2KHR = ImageFormatProperties2;
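// Non-normative usage sketch (not part of the generated API): querying the limits for a
// format/usage combination; in enhanced mode with exceptions enabled, an unsupported combination
// throws. "physicalDevice" is an assumed vk::PhysicalDevice.
//
//   vk::PhysicalDeviceImageFormatInfo2 formatInfo{ vk::Format::eR8G8B8A8Unorm, vk::ImageType::e2D,
//                                                  vk::ImageTiling::eOptimal, vk::ImageUsageFlagBits::eSampled };
//   vk::ImageFormatProperties2 properties = physicalDevice.getImageFormatProperties2( formatInfo );
//   uint32_t maxMipLevels = properties.imageFormatProperties.maxMipLevels;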
// wrapper struct for struct VkImageMemoryBarrier, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageMemoryBarrier.html
struct ImageMemoryBarrier
{
using NativeType = VkImageMemoryBarrier;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageMemoryBarrier;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImageMemoryBarrier(VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, uint32_t srcQueueFamilyIndex_ = {}, uint32_t dstQueueFamilyIndex_ = {}, VULKAN_HPP_NAMESPACE::Image image_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, srcAccessMask{ srcAccessMask_ }, dstAccessMask{ dstAccessMask_ }, oldLayout{ oldLayout_ }, newLayout{ newLayout_ }, srcQueueFamilyIndex{ srcQueueFamilyIndex_ }, dstQueueFamilyIndex{ dstQueueFamilyIndex_ }, image{ image_ }, subresourceRange{ subresourceRange_ }
{}
VULKAN_HPP_CONSTEXPR ImageMemoryBarrier( ImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageMemoryBarrier( VkImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageMemoryBarrier( *reinterpret_cast<ImageMemoryBarrier const *>( &rhs ) )
{}
ImageMemoryBarrier & operator=( ImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ImageMemoryBarrier & operator=( VkImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageMemoryBarrier const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
{
srcAccessMask = srcAccessMask_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
{
dstAccessMask = dstAccessMask_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setOldLayout( VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ ) VULKAN_HPP_NOEXCEPT
{
oldLayout = oldLayout_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setNewLayout( VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ ) VULKAN_HPP_NOEXCEPT
{
newLayout = newLayout_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
{
srcQueueFamilyIndex = srcQueueFamilyIndex_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
{
dstQueueFamilyIndex = dstQueueFamilyIndex_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
{
image = image_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT
{
subresourceRange = subresourceRange_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkImageMemoryBarrier const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageMemoryBarrier*>( this );
}
operator VkImageMemoryBarrier &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageMemoryBarrier*>( this );
}
operator VkImageMemoryBarrier const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkImageMemoryBarrier*>( this );
}
operator VkImageMemoryBarrier *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkImageMemoryBarrier*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::AccessFlags const &, VULKAN_HPP_NAMESPACE::AccessFlags const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Image const &, VULKAN_HPP_NAMESPACE::ImageSubresourceRange const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, srcAccessMask, dstAccessMask, oldLayout, newLayout, srcQueueFamilyIndex, dstQueueFamilyIndex, image, subresourceRange );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImageMemoryBarrier const & ) const = default;
#else
bool operator==( ImageMemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( srcAccessMask == rhs.srcAccessMask )
&& ( dstAccessMask == rhs.dstAccessMask )
&& ( oldLayout == rhs.oldLayout )
&& ( newLayout == rhs.newLayout )
&& ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex )
&& ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex )
&& ( image == rhs.image )
&& ( subresourceRange == rhs.subresourceRange );
#endif
}
bool operator!=( ImageMemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryBarrier;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {};
VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
VULKAN_HPP_NAMESPACE::ImageLayout oldLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
VULKAN_HPP_NAMESPACE::ImageLayout newLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
uint32_t srcQueueFamilyIndex = {};
uint32_t dstQueueFamilyIndex = {};
VULKAN_HPP_NAMESPACE::Image image = {};
VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {};
};
template <>
struct CppType<StructureType, StructureType::eImageMemoryBarrier>
{
using Type = ImageMemoryBarrier;
};
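// Non-normative usage sketch (not part of the generated API): transitioning a freshly created
// color image into the transfer-destination layout. "commandBuffer" and "image" are assumed to
// exist; both queue family indices stay VK_QUEUE_FAMILY_IGNORED since no ownership transfer occurs.
//
//   vk::ImageMemoryBarrier barrier{};
//   barrier.setDstAccessMask( vk::AccessFlagBits::eTransferWrite )
//          .setOldLayout( vk::ImageLayout::eUndefined )
//          .setNewLayout( vk::ImageLayout::eTransferDstOptimal )
//          .setSrcQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
//          .setDstQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
//          .setImage( image )
//          .setSubresourceRange( { vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 } );
//   commandBuffer.pipelineBarrier( vk::PipelineStageFlagBits::eTopOfPipe, vk::PipelineStageFlagBits::eTransfer,
//                                  {}, nullptr, nullptr, barrier );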
// wrapper struct for struct VkImageMemoryRequirementsInfo2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageMemoryRequirementsInfo2.html
struct ImageMemoryRequirementsInfo2
{
using NativeType = VkImageMemoryRequirementsInfo2;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageMemoryRequirementsInfo2;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImageMemoryRequirementsInfo2(VULKAN_HPP_NAMESPACE::Image image_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, image{ image_ }
{}
VULKAN_HPP_CONSTEXPR ImageMemoryRequirementsInfo2( ImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageMemoryRequirementsInfo2( VkImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageMemoryRequirementsInfo2( *reinterpret_cast<ImageMemoryRequirementsInfo2 const *>( &rhs ) )
{}
ImageMemoryRequirementsInfo2 & operator=( ImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ImageMemoryRequirementsInfo2 & operator=( VkImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ImageMemoryRequirementsInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageMemoryRequirementsInfo2 & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
{
image = image_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkImageMemoryRequirementsInfo2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageMemoryRequirementsInfo2*>( this );
}
operator VkImageMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageMemoryRequirementsInfo2*>( this );
}
operator VkImageMemoryRequirementsInfo2 const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkImageMemoryRequirementsInfo2*>( this );
}
operator VkImageMemoryRequirementsInfo2 *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkImageMemoryRequirementsInfo2*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Image const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, image );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImageMemoryRequirementsInfo2 const & ) const = default;
#else
bool operator==( ImageMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( image == rhs.image );
#endif
}
bool operator!=( ImageMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryRequirementsInfo2;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Image image = {};
};
template <>
struct CppType<StructureType, StructureType::eImageMemoryRequirementsInfo2>
{
using Type = ImageMemoryRequirementsInfo2;
};
using ImageMemoryRequirementsInfo2KHR = ImageMemoryRequirementsInfo2;
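// Non-normative usage sketch (not part of the generated API): the *2 variant of the image
// memory-requirements query; both its input and output accept extension structs on their pNext
// chains. "device" and "image" are assumed handles.
//
//   vk::MemoryRequirements2 requirements =
//     device.getImageMemoryRequirements2( vk::ImageMemoryRequirementsInfo2{ image } );
//   vk::DeviceSize requiredSize = requirements.memoryRequirements.size;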
#if defined( VK_USE_PLATFORM_FUCHSIA )
// wrapper struct for struct VkImagePipeSurfaceCreateInfoFUCHSIA, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImagePipeSurfaceCreateInfoFUCHSIA.html
struct ImagePipeSurfaceCreateInfoFUCHSIA
{
using NativeType = VkImagePipeSurfaceCreateInfoFUCHSIA;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImagepipeSurfaceCreateInfoFUCHSIA;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImagePipeSurfaceCreateInfoFUCHSIA(VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags_ = {}, zx_handle_t imagePipeHandle_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, imagePipeHandle{ imagePipeHandle_ }
{}
VULKAN_HPP_CONSTEXPR ImagePipeSurfaceCreateInfoFUCHSIA( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImagePipeSurfaceCreateInfoFUCHSIA( VkImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
: ImagePipeSurfaceCreateInfoFUCHSIA( *reinterpret_cast<ImagePipeSurfaceCreateInfoFUCHSIA const *>( &rhs ) )
{}
ImagePipeSurfaceCreateInfoFUCHSIA & operator=( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ImagePipeSurfaceCreateInfoFUCHSIA & operator=( VkImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ImagePipeSurfaceCreateInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImagePipeSurfaceCreateInfoFUCHSIA & setFlags( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImagePipeSurfaceCreateInfoFUCHSIA & setImagePipeHandle( zx_handle_t imagePipeHandle_ ) VULKAN_HPP_NOEXCEPT
{
imagePipeHandle = imagePipeHandle_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkImagePipeSurfaceCreateInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA*>( this );
}
operator VkImagePipeSurfaceCreateInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImagePipeSurfaceCreateInfoFUCHSIA*>( this );
}
operator VkImagePipeSurfaceCreateInfoFUCHSIA const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA*>( this );
}
operator VkImagePipeSurfaceCreateInfoFUCHSIA *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkImagePipeSurfaceCreateInfoFUCHSIA*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA const &, zx_handle_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, imagePipeHandle );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
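    // Note: zx_handle_t is an external platform type, so the three-way comparison is spelled out
    // member by member and conservatively falls back to memcmp for the handle instead of
    // defaulting operator<=>.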
std::strong_ordering operator<=>( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) return cmp;
if ( auto cmp = memcmp( &imagePipeHandle, &rhs.imagePipeHandle, sizeof( zx_handle_t ) ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
return std::strong_ordering::equivalent;
}
#endif
bool operator==( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( memcmp( &imagePipeHandle, &rhs.imagePipeHandle, sizeof( zx_handle_t ) ) == 0 );
}
bool operator!=( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImagepipeSurfaceCreateInfoFUCHSIA;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags = {};
zx_handle_t imagePipeHandle = {};
};
template <>
struct CppType<StructureType, StructureType::eImagepipeSurfaceCreateInfoFUCHSIA>
{
using Type = ImagePipeSurfaceCreateInfoFUCHSIA;
};
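  // Illustrative usage sketch (not generated code): creating a Fuchsia image-pipe surface via
  // Instance::createImagePipeSurfaceFUCHSIA; "instance" and "imagePipeHandle" are assumed to be a
  // valid Instance and zx_handle_t.
  //   VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA createInfo( {}, imagePipeHandle );
  //   VULKAN_HPP_NAMESPACE::SurfaceKHR surface = instance.createImagePipeSurfaceFUCHSIA( createInfo );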
#endif /*VK_USE_PLATFORM_FUCHSIA*/
// wrapper struct for struct VkImagePlaneMemoryRequirementsInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImagePlaneMemoryRequirementsInfo.html
struct ImagePlaneMemoryRequirementsInfo
{
using NativeType = VkImagePlaneMemoryRequirementsInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImagePlaneMemoryRequirementsInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImagePlaneMemoryRequirementsInfo(VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, planeAspect{ planeAspect_ }
{}
VULKAN_HPP_CONSTEXPR ImagePlaneMemoryRequirementsInfo( ImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImagePlaneMemoryRequirementsInfo( VkImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: ImagePlaneMemoryRequirementsInfo( *reinterpret_cast<ImagePlaneMemoryRequirementsInfo const *>( &rhs ) )
{}
ImagePlaneMemoryRequirementsInfo & operator=( ImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ImagePlaneMemoryRequirementsInfo & operator=( VkImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ImagePlaneMemoryRequirementsInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImagePlaneMemoryRequirementsInfo & setPlaneAspect( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ ) VULKAN_HPP_NOEXCEPT
{
planeAspect = planeAspect_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkImagePlaneMemoryRequirementsInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImagePlaneMemoryRequirementsInfo*>( this );
}
operator VkImagePlaneMemoryRequirementsInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImagePlaneMemoryRequirementsInfo*>( this );
}
operator VkImagePlaneMemoryRequirementsInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkImagePlaneMemoryRequirementsInfo*>( this );
}
operator VkImagePlaneMemoryRequirementsInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkImagePlaneMemoryRequirementsInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageAspectFlagBits const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, planeAspect );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImagePlaneMemoryRequirementsInfo const & ) const = default;
#else
bool operator==( ImagePlaneMemoryRequirementsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( planeAspect == rhs.planeAspect );
#endif
}
bool operator!=( ImagePlaneMemoryRequirementsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImagePlaneMemoryRequirementsInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor;
};
template <>
struct CppType<StructureType, StructureType::eImagePlaneMemoryRequirementsInfo>
{
using Type = ImagePlaneMemoryRequirementsInfo;
};
using ImagePlaneMemoryRequirementsInfoKHR = ImagePlaneMemoryRequirementsInfo;
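  // Illustrative usage sketch (not generated code): chaining ImagePlaneMemoryRequirementsInfo into
  // ImageMemoryRequirementsInfo2 to query a single plane of a multi-planar image ("image" assumed valid).
  //   VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfo planeInfo( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::ePlane0 );
  //   VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 info( image, &planeInfo );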
// wrapper struct for struct VkImageResolve, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageResolve.html
struct ImageResolve
{
using NativeType = VkImageResolve;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImageResolve(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}) VULKAN_HPP_NOEXCEPT
: srcSubresource{ srcSubresource_ }, srcOffset{ srcOffset_ }, dstSubresource{ dstSubresource_ }, dstOffset{ dstOffset_ }, extent{ extent_ }
{}
VULKAN_HPP_CONSTEXPR ImageResolve( ImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageResolve( VkImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageResolve( *reinterpret_cast<ImageResolve const *>( &rhs ) )
{}
ImageResolve & operator=( ImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ImageResolve & operator=( VkImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageResolve const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ImageResolve & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
{
srcSubresource = srcSubresource_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageResolve & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT
{
srcOffset = srcOffset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageResolve & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
{
dstSubresource = dstSubresource_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageResolve & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT
{
dstOffset = dstOffset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageResolve & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
{
extent = extent_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkImageResolve const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageResolve*>( this );
}
operator VkImageResolve &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageResolve*>( this );
}
operator VkImageResolve const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkImageResolve*>( this );
}
operator VkImageResolve *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkImageResolve*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &, VULKAN_HPP_NAMESPACE::Offset3D const &, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &, VULKAN_HPP_NAMESPACE::Offset3D const &, VULKAN_HPP_NAMESPACE::Extent3D const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( srcSubresource, srcOffset, dstSubresource, dstOffset, extent );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImageResolve const & ) const = default;
#else
bool operator==( ImageResolve const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( srcSubresource == rhs.srcSubresource )
&& ( srcOffset == rhs.srcOffset )
&& ( dstSubresource == rhs.dstSubresource )
&& ( dstOffset == rhs.dstOffset )
&& ( extent == rhs.extent );
#endif
}
bool operator!=( ImageResolve const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {};
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {};
VULKAN_HPP_NAMESPACE::Extent3D extent = {};
};
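  // Illustrative usage sketch (not generated code): resolving a multisampled image with
  // CommandBuffer::resolveImage; the handles, subresource layers, offsets and extent are assumptions.
  //   VULKAN_HPP_NAMESPACE::ImageResolve region( srcLayers, srcOffset, dstLayers, dstOffset, extent );
  //   commandBuffer.resolveImage( srcImage, VULKAN_HPP_NAMESPACE::ImageLayout::eTransferSrcOptimal,
  //                               dstImage, VULKAN_HPP_NAMESPACE::ImageLayout::eTransferDstOptimal, region );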
// wrapper struct for struct VkImageResolve2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageResolve2.html
struct ImageResolve2
{
using NativeType = VkImageResolve2;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageResolve2;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImageResolve2(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, srcSubresource{ srcSubresource_ }, srcOffset{ srcOffset_ }, dstSubresource{ dstSubresource_ }, dstOffset{ dstOffset_ }, extent{ extent_ }
{}
VULKAN_HPP_CONSTEXPR ImageResolve2( ImageResolve2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageResolve2( VkImageResolve2 const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageResolve2( *reinterpret_cast<ImageResolve2 const *>( &rhs ) )
{}
ImageResolve2 & operator=( ImageResolve2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ImageResolve2 & operator=( VkImageResolve2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageResolve2 const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
{
srcSubresource = srcSubresource_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT
{
srcOffset = srcOffset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
{
dstSubresource = dstSubresource_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT
{
dstOffset = dstOffset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
{
extent = extent_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkImageResolve2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageResolve2*>( this );
}
operator VkImageResolve2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageResolve2*>( this );
}
operator VkImageResolve2 const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkImageResolve2*>( this );
}
operator VkImageResolve2 *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkImageResolve2*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &, VULKAN_HPP_NAMESPACE::Offset3D const &, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &, VULKAN_HPP_NAMESPACE::Offset3D const &, VULKAN_HPP_NAMESPACE::Extent3D const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, srcSubresource, srcOffset, dstSubresource, dstOffset, extent );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImageResolve2 const & ) const = default;
#else
bool operator==( ImageResolve2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( srcSubresource == rhs.srcSubresource )
&& ( srcOffset == rhs.srcOffset )
&& ( dstSubresource == rhs.dstSubresource )
&& ( dstOffset == rhs.dstOffset )
&& ( extent == rhs.extent );
#endif
}
bool operator!=( ImageResolve2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageResolve2;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {};
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {};
VULKAN_HPP_NAMESPACE::Extent3D extent = {};
};
template <>
struct CppType<StructureType, StructureType::eImageResolve2>
{
using Type = ImageResolve2;
};
using ImageResolve2KHR = ImageResolve2;
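  // Illustrative usage sketch (not generated code): ImageResolve2 regions travel inside a
  // ResolveImageInfo2 recorded with CommandBuffer::resolveImage2 (all handles assumed valid).
  //   VULKAN_HPP_NAMESPACE::ImageResolve2 region( srcLayers, srcOffset, dstLayers, dstOffset, extent );
  //   VULKAN_HPP_NAMESPACE::ResolveImageInfo2 resolveInfo( srcImage, VULKAN_HPP_NAMESPACE::ImageLayout::eTransferSrcOptimal,
  //                                                        dstImage, VULKAN_HPP_NAMESPACE::ImageLayout::eTransferDstOptimal, region );
  //   commandBuffer.resolveImage2( resolveInfo );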
// wrapper struct for struct VkImageSparseMemoryRequirementsInfo2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageSparseMemoryRequirementsInfo2.html
struct ImageSparseMemoryRequirementsInfo2
{
using NativeType = VkImageSparseMemoryRequirementsInfo2;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageSparseMemoryRequirementsInfo2;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImageSparseMemoryRequirementsInfo2(VULKAN_HPP_NAMESPACE::Image image_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, image{ image_ }
{}
VULKAN_HPP_CONSTEXPR ImageSparseMemoryRequirementsInfo2( ImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageSparseMemoryRequirementsInfo2( VkImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageSparseMemoryRequirementsInfo2( *reinterpret_cast<ImageSparseMemoryRequirementsInfo2 const *>( &rhs ) )
{}
ImageSparseMemoryRequirementsInfo2 & operator=( ImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ImageSparseMemoryRequirementsInfo2 & operator=( VkImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ImageSparseMemoryRequirementsInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageSparseMemoryRequirementsInfo2 & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
{
image = image_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkImageSparseMemoryRequirementsInfo2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2*>( this );
}
operator VkImageSparseMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageSparseMemoryRequirementsInfo2*>( this );
}
operator VkImageSparseMemoryRequirementsInfo2 const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2*>( this );
}
operator VkImageSparseMemoryRequirementsInfo2 *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkImageSparseMemoryRequirementsInfo2*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Image const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, image );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImageSparseMemoryRequirementsInfo2 const & ) const = default;
#else
bool operator==( ImageSparseMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( image == rhs.image );
#endif
}
bool operator!=( ImageSparseMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageSparseMemoryRequirementsInfo2;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Image image = {};
};
template <>
struct CppType<StructureType, StructureType::eImageSparseMemoryRequirementsInfo2>
{
using Type = ImageSparseMemoryRequirementsInfo2;
};
using ImageSparseMemoryRequirementsInfo2KHR = ImageSparseMemoryRequirementsInfo2;
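  // Illustrative usage sketch (not generated code): querying sparse memory requirements ("device"
  // and "image" assumed valid); the enhanced-mode wrapper returns a std::vector of results.
  //   VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 info( image );
  //   auto sparseRequirements = device.getImageSparseMemoryRequirements2( info );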
// wrapper struct for struct VkImageStencilUsageCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageStencilUsageCreateInfo.html
struct ImageStencilUsageCreateInfo
{
using NativeType = VkImageStencilUsageCreateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageStencilUsageCreateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImageStencilUsageCreateInfo(VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, stencilUsage{ stencilUsage_ }
{}
VULKAN_HPP_CONSTEXPR ImageStencilUsageCreateInfo( ImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageStencilUsageCreateInfo( VkImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageStencilUsageCreateInfo( *reinterpret_cast<ImageStencilUsageCreateInfo const *>( &rhs ) )
{}
ImageStencilUsageCreateInfo & operator=( ImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ImageStencilUsageCreateInfo & operator=( VkImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ImageStencilUsageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageStencilUsageCreateInfo & setStencilUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage_ ) VULKAN_HPP_NOEXCEPT
{
stencilUsage = stencilUsage_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkImageStencilUsageCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageStencilUsageCreateInfo*>( this );
}
operator VkImageStencilUsageCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageStencilUsageCreateInfo*>( this );
}
operator VkImageStencilUsageCreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkImageStencilUsageCreateInfo*>( this );
}
operator VkImageStencilUsageCreateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkImageStencilUsageCreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageUsageFlags const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, stencilUsage );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImageStencilUsageCreateInfo const & ) const = default;
#else
bool operator==( ImageStencilUsageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( stencilUsage == rhs.stencilUsage );
#endif
}
bool operator!=( ImageStencilUsageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageStencilUsageCreateInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage = {};
};
template <>
struct CppType<StructureType, StructureType::eImageStencilUsageCreateInfo>
{
using Type = ImageStencilUsageCreateInfo;
};
using ImageStencilUsageCreateInfoEXT = ImageStencilUsageCreateInfo;
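  // Illustrative usage sketch (not generated code): giving the stencil aspect its own usage by
  // chaining this struct into an ImageCreateInfo ("imageCreateInfo" is an assumed ImageCreateInfo).
  //   VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfo stencilUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlagBits::eDepthStencilAttachment );
  //   imageCreateInfo.setPNext( &stencilUsage );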
// wrapper struct for struct VkImageSwapchainCreateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageSwapchainCreateInfoKHR.html
struct ImageSwapchainCreateInfoKHR
{
using NativeType = VkImageSwapchainCreateInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageSwapchainCreateInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImageSwapchainCreateInfoKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, swapchain{ swapchain_ }
{}
VULKAN_HPP_CONSTEXPR ImageSwapchainCreateInfoKHR( ImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageSwapchainCreateInfoKHR( VkImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageSwapchainCreateInfoKHR( *reinterpret_cast<ImageSwapchainCreateInfoKHR const *>( &rhs ) )
{}
ImageSwapchainCreateInfoKHR & operator=( ImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ImageSwapchainCreateInfoKHR & operator=( VkImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ImageSwapchainCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageSwapchainCreateInfoKHR & setSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT
{
swapchain = swapchain_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkImageSwapchainCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageSwapchainCreateInfoKHR*>( this );
}
operator VkImageSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageSwapchainCreateInfoKHR*>( this );
}
operator VkImageSwapchainCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkImageSwapchainCreateInfoKHR*>( this );
}
operator VkImageSwapchainCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkImageSwapchainCreateInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SwapchainKHR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, swapchain );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImageSwapchainCreateInfoKHR const & ) const = default;
#else
bool operator==( ImageSwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( swapchain == rhs.swapchain );
#endif
}
bool operator!=( ImageSwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageSwapchainCreateInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {};
};
template <>
struct CppType<StructureType, StructureType::eImageSwapchainCreateInfoKHR>
{
using Type = ImageSwapchainCreateInfoKHR;
};
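  // Illustrative usage sketch (not generated code): binding a newly created image to an existing
  // swapchain by chaining this struct into ImageCreateInfo ("swapchain" and "imageCreateInfo" are assumptions).
  //   VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR swapchainInfo( swapchain );
  //   imageCreateInfo.setPNext( &swapchainInfo );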
// wrapper struct for struct VkImageViewASTCDecodeModeEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageViewASTCDecodeModeEXT.html
struct ImageViewASTCDecodeModeEXT
{
using NativeType = VkImageViewASTCDecodeModeEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewAstcDecodeModeEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImageViewASTCDecodeModeEXT(VULKAN_HPP_NAMESPACE::Format decodeMode_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, decodeMode{ decodeMode_ }
{}
VULKAN_HPP_CONSTEXPR ImageViewASTCDecodeModeEXT( ImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageViewASTCDecodeModeEXT( VkImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageViewASTCDecodeModeEXT( *reinterpret_cast<ImageViewASTCDecodeModeEXT const *>( &rhs ) )
{}
ImageViewASTCDecodeModeEXT & operator=( ImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ImageViewASTCDecodeModeEXT & operator=( VkImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ImageViewASTCDecodeModeEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageViewASTCDecodeModeEXT & setDecodeMode( VULKAN_HPP_NAMESPACE::Format decodeMode_ ) VULKAN_HPP_NOEXCEPT
{
decodeMode = decodeMode_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkImageViewASTCDecodeModeEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageViewASTCDecodeModeEXT*>( this );
}
operator VkImageViewASTCDecodeModeEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageViewASTCDecodeModeEXT*>( this );
}
operator VkImageViewASTCDecodeModeEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkImageViewASTCDecodeModeEXT*>( this );
}
operator VkImageViewASTCDecodeModeEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkImageViewASTCDecodeModeEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Format const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, decodeMode );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImageViewASTCDecodeModeEXT const & ) const = default;
#else
bool operator==( ImageViewASTCDecodeModeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( decodeMode == rhs.decodeMode );
#endif
}
bool operator!=( ImageViewASTCDecodeModeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewAstcDecodeModeEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Format decodeMode = VULKAN_HPP_NAMESPACE::Format::eUndefined;
};
template <>
struct CppType<StructureType, StructureType::eImageViewAstcDecodeModeEXT>
{
using Type = ImageViewASTCDecodeModeEXT;
};
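  // Illustrative usage sketch (not generated code): requesting an ASTC decode format by chaining
  // this struct into ImageViewCreateInfo ("viewCreateInfo" is an assumed ImageViewCreateInfo).
  //   VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT decodeMode( VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm );
  //   viewCreateInfo.setPNext( &decodeMode );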
// wrapper struct for struct VkImageViewAddressPropertiesNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageViewAddressPropertiesNVX.html
struct ImageViewAddressPropertiesNVX
{
using NativeType = VkImageViewAddressPropertiesNVX;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewAddressPropertiesNVX;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImageViewAddressPropertiesNVX(VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, deviceAddress{ deviceAddress_ }, size{ size_ }
{}
VULKAN_HPP_CONSTEXPR ImageViewAddressPropertiesNVX( ImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageViewAddressPropertiesNVX( VkImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageViewAddressPropertiesNVX( *reinterpret_cast<ImageViewAddressPropertiesNVX const *>( &rhs ) )
{}
ImageViewAddressPropertiesNVX & operator=( ImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ImageViewAddressPropertiesNVX & operator=( VkImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX const *>( &rhs );
return *this;
}
operator VkImageViewAddressPropertiesNVX const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageViewAddressPropertiesNVX*>( this );
}
operator VkImageViewAddressPropertiesNVX &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageViewAddressPropertiesNVX*>( this );
}
operator VkImageViewAddressPropertiesNVX const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkImageViewAddressPropertiesNVX*>( this );
}
operator VkImageViewAddressPropertiesNVX *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkImageViewAddressPropertiesNVX*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, deviceAddress, size );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImageViewAddressPropertiesNVX const & ) const = default;
#else
bool operator==( ImageViewAddressPropertiesNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( deviceAddress == rhs.deviceAddress )
&& ( size == rhs.size );
#endif
}
bool operator!=( ImageViewAddressPropertiesNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewAddressPropertiesNVX;
void * pNext = {};
VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {};
VULKAN_HPP_NAMESPACE::DeviceSize size = {};
};
template <>
struct CppType<StructureType, StructureType::eImageViewAddressPropertiesNVX>
{
using Type = ImageViewAddressPropertiesNVX;
};
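  // Illustrative usage sketch (not generated code): this is an output structure, filled in by
  // Device::getImageViewAddressNVX ("device" and "imageView" assumed valid).
  //   VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX properties = device.getImageViewAddressNVX( imageView );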
// wrapper struct for struct VkImageViewCaptureDescriptorDataInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageViewCaptureDescriptorDataInfoEXT.html
struct ImageViewCaptureDescriptorDataInfoEXT
{
using NativeType = VkImageViewCaptureDescriptorDataInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewCaptureDescriptorDataInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImageViewCaptureDescriptorDataInfoEXT(VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, imageView{ imageView_ }
{}
VULKAN_HPP_CONSTEXPR ImageViewCaptureDescriptorDataInfoEXT( ImageViewCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageViewCaptureDescriptorDataInfoEXT( VkImageViewCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageViewCaptureDescriptorDataInfoEXT( *reinterpret_cast<ImageViewCaptureDescriptorDataInfoEXT const *>( &rhs ) )
{}
ImageViewCaptureDescriptorDataInfoEXT & operator=( ImageViewCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ImageViewCaptureDescriptorDataInfoEXT & operator=( VkImageViewCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ImageViewCaptureDescriptorDataInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageViewCaptureDescriptorDataInfoEXT & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
{
imageView = imageView_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkImageViewCaptureDescriptorDataInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageViewCaptureDescriptorDataInfoEXT*>( this );
}
operator VkImageViewCaptureDescriptorDataInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageViewCaptureDescriptorDataInfoEXT*>( this );
}
operator VkImageViewCaptureDescriptorDataInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkImageViewCaptureDescriptorDataInfoEXT*>( this );
}
operator VkImageViewCaptureDescriptorDataInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkImageViewCaptureDescriptorDataInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageView const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, imageView );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImageViewCaptureDescriptorDataInfoEXT const & ) const = default;
#else
bool operator==( ImageViewCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( imageView == rhs.imageView );
#endif
}
bool operator!=( ImageViewCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewCaptureDescriptorDataInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ImageView imageView = {};
};
template <>
struct CppType<StructureType, StructureType::eImageViewCaptureDescriptorDataInfoEXT>
{
using Type = ImageViewCaptureDescriptorDataInfoEXT;
};
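  // Illustrative usage sketch (not generated code): retrieving opaque capture data for a view via
  // Device::getImageViewOpaqueCaptureDescriptorDataEXT; "device" and "imageView" are assumed
  // valid, and the uint64_t data type is a placeholder choice.
  //   VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT info( imageView );
  //   auto captureData = device.getImageViewOpaqueCaptureDescriptorDataEXT<uint64_t>( info );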
// wrapper struct for struct VkImageViewCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageViewCreateInfo.html
struct ImageViewCreateInfo
{
using NativeType = VkImageViewCreateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewCreateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImageViewCreateInfo(VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Image image_ = {}, VULKAN_HPP_NAMESPACE::ImageViewType viewType_ = VULKAN_HPP_NAMESPACE::ImageViewType::e1D, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, image{ image_ }, viewType{ viewType_ }, format{ format_ }, components{ components_ }, subresourceRange{ subresourceRange_ }
{}
VULKAN_HPP_CONSTEXPR ImageViewCreateInfo( ImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageViewCreateInfo( VkImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageViewCreateInfo( *reinterpret_cast<ImageViewCreateInfo const *>( &rhs ) )
{}
ImageViewCreateInfo & operator=( ImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ImageViewCreateInfo & operator=( VkImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
{
image = image_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setViewType( VULKAN_HPP_NAMESPACE::ImageViewType viewType_ ) VULKAN_HPP_NOEXCEPT
{
viewType = viewType_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
{
format = format_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setComponents( VULKAN_HPP_NAMESPACE::ComponentMapping const & components_ ) VULKAN_HPP_NOEXCEPT
{
components = components_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT
{
subresourceRange = subresourceRange_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkImageViewCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageViewCreateInfo*>( this );
}
operator VkImageViewCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageViewCreateInfo*>( this );
}
operator VkImageViewCreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkImageViewCreateInfo*>( this );
}
operator VkImageViewCreateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkImageViewCreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageViewCreateFlags const &, VULKAN_HPP_NAMESPACE::Image const &, VULKAN_HPP_NAMESPACE::ImageViewType const &, VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::ComponentMapping const &, VULKAN_HPP_NAMESPACE::ImageSubresourceRange const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, image, viewType, format, components, subresourceRange );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImageViewCreateInfo const & ) const = default;
#else
bool operator==( ImageViewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( image == rhs.image )
&& ( viewType == rhs.viewType )
&& ( format == rhs.format )
&& ( components == rhs.components )
&& ( subresourceRange == rhs.subresourceRange );
#endif
}
bool operator!=( ImageViewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewCreateInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags = {};
VULKAN_HPP_NAMESPACE::Image image = {};
VULKAN_HPP_NAMESPACE::ImageViewType viewType = VULKAN_HPP_NAMESPACE::ImageViewType::e1D;
VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
VULKAN_HPP_NAMESPACE::ComponentMapping components = {};
VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {};
};
template <>
struct CppType<StructureType, StructureType::eImageViewCreateInfo>
{
using Type = ImageViewCreateInfo;
};
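  // Illustrative usage sketch (not generated code): creating a 2D view of an image; "device",
  // "image", "format" and "range" are assumptions standing in for real values.
  //   VULKAN_HPP_NAMESPACE::ImageViewCreateInfo createInfo( {}, image, VULKAN_HPP_NAMESPACE::ImageViewType::e2D, format, {}, range );
  //   VULKAN_HPP_NAMESPACE::ImageView view = device.createImageView( createInfo );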
// wrapper struct for struct VkImageViewHandleInfoNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageViewHandleInfoNVX.html
struct ImageViewHandleInfoNVX
{
using NativeType = VkImageViewHandleInfoNVX;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewHandleInfoNVX;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImageViewHandleInfoNVX(VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, VULKAN_HPP_NAMESPACE::Sampler sampler_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, imageView{ imageView_ }, descriptorType{ descriptorType_ }, sampler{ sampler_ }
{}
VULKAN_HPP_CONSTEXPR ImageViewHandleInfoNVX( ImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageViewHandleInfoNVX( VkImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageViewHandleInfoNVX( *reinterpret_cast<ImageViewHandleInfoNVX const *>( &rhs ) )
{}
ImageViewHandleInfoNVX & operator=( ImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ImageViewHandleInfoNVX & operator=( VkImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ImageViewHandleInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageViewHandleInfoNVX & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
{
imageView = imageView_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageViewHandleInfoNVX & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
{
descriptorType = descriptorType_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageViewHandleInfoNVX & setSampler( VULKAN_HPP_NAMESPACE::Sampler sampler_ ) VULKAN_HPP_NOEXCEPT
{
sampler = sampler_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkImageViewHandleInfoNVX const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageViewHandleInfoNVX*>( this );
}
operator VkImageViewHandleInfoNVX &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageViewHandleInfoNVX*>( this );
}
operator VkImageViewHandleInfoNVX const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkImageViewHandleInfoNVX*>( this );
}
operator VkImageViewHandleInfoNVX *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkImageViewHandleInfoNVX*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageView const &, VULKAN_HPP_NAMESPACE::DescriptorType const &, VULKAN_HPP_NAMESPACE::Sampler const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, imageView, descriptorType, sampler );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImageViewHandleInfoNVX const & ) const = default;
#else
bool operator==( ImageViewHandleInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( imageView == rhs.imageView )
&& ( descriptorType == rhs.descriptorType )
&& ( sampler == rhs.sampler );
#endif
}
bool operator!=( ImageViewHandleInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewHandleInfoNVX;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ImageView imageView = {};
VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
VULKAN_HPP_NAMESPACE::Sampler sampler = {};
};
template <>
struct CppType<StructureType, StructureType::eImageViewHandleInfoNVX>
{
using Type = ImageViewHandleInfoNVX;
};
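  // Illustrative usage sketch (not generated code): querying the 32-bit handle of an image view
  // via Device::getImageViewHandleNVX ("device", "imageView" and "sampler" assumed valid).
  //   VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX info( imageView, VULKAN_HPP_NAMESPACE::DescriptorType::eCombinedImageSampler, sampler );
  //   uint32_t handle = device.getImageViewHandleNVX( info );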
// wrapper struct for struct VkImageViewMinLodCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageViewMinLodCreateInfoEXT.html
struct ImageViewMinLodCreateInfoEXT
{
using NativeType = VkImageViewMinLodCreateInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewMinLodCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImageViewMinLodCreateInfoEXT(float minLod_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, minLod{ minLod_ }
{}
VULKAN_HPP_CONSTEXPR ImageViewMinLodCreateInfoEXT( ImageViewMinLodCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageViewMinLodCreateInfoEXT( VkImageViewMinLodCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageViewMinLodCreateInfoEXT( *reinterpret_cast<ImageViewMinLodCreateInfoEXT const *>( &rhs ) )
{}
ImageViewMinLodCreateInfoEXT & operator=( ImageViewMinLodCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ImageViewMinLodCreateInfoEXT & operator=( VkImageViewMinLodCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewMinLodCreateInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ImageViewMinLodCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageViewMinLodCreateInfoEXT & setMinLod( float minLod_ ) VULKAN_HPP_NOEXCEPT
{
minLod = minLod_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkImageViewMinLodCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageViewMinLodCreateInfoEXT*>( this );
}
operator VkImageViewMinLodCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageViewMinLodCreateInfoEXT*>( this );
}
operator VkImageViewMinLodCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkImageViewMinLodCreateInfoEXT*>( this );
}
operator VkImageViewMinLodCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkImageViewMinLodCreateInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, float const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, minLod );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImageViewMinLodCreateInfoEXT const & ) const = default;
#else
bool operator==( ImageViewMinLodCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( minLod == rhs.minLod );
#endif
}
bool operator!=( ImageViewMinLodCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewMinLodCreateInfoEXT;
const void * pNext = {};
float minLod = {};
};
template <>
struct CppType<StructureType, StructureType::eImageViewMinLodCreateInfoEXT>
{
using Type = ImageViewMinLodCreateInfoEXT;
};
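  // Illustrative usage sketch (not generated code): clamping the minimum LOD of a view by chaining
  // this struct into ImageViewCreateInfo ("viewCreateInfo" is an assumed ImageViewCreateInfo).
  //   VULKAN_HPP_NAMESPACE::ImageViewMinLodCreateInfoEXT minLodInfo( 2.0f );
  //   viewCreateInfo.setPNext( &minLodInfo );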
// wrapper struct for struct VkImageViewSampleWeightCreateInfoQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageViewSampleWeightCreateInfoQCOM.html
struct ImageViewSampleWeightCreateInfoQCOM
{
using NativeType = VkImageViewSampleWeightCreateInfoQCOM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewSampleWeightCreateInfoQCOM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImageViewSampleWeightCreateInfoQCOM(VULKAN_HPP_NAMESPACE::Offset2D filterCenter_ = {}, VULKAN_HPP_NAMESPACE::Extent2D filterSize_ = {}, uint32_t numPhases_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, filterCenter{ filterCenter_ }, filterSize{ filterSize_ }, numPhases{ numPhases_ }
{}
VULKAN_HPP_CONSTEXPR ImageViewSampleWeightCreateInfoQCOM( ImageViewSampleWeightCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageViewSampleWeightCreateInfoQCOM( VkImageViewSampleWeightCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageViewSampleWeightCreateInfoQCOM( *reinterpret_cast<ImageViewSampleWeightCreateInfoQCOM const *>( &rhs ) )
{}
ImageViewSampleWeightCreateInfoQCOM & operator=( ImageViewSampleWeightCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ImageViewSampleWeightCreateInfoQCOM & operator=( VkImageViewSampleWeightCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewSampleWeightCreateInfoQCOM const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ImageViewSampleWeightCreateInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageViewSampleWeightCreateInfoQCOM & setFilterCenter( VULKAN_HPP_NAMESPACE::Offset2D const & filterCenter_ ) VULKAN_HPP_NOEXCEPT
{
filterCenter = filterCenter_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageViewSampleWeightCreateInfoQCOM & setFilterSize( VULKAN_HPP_NAMESPACE::Extent2D const & filterSize_ ) VULKAN_HPP_NOEXCEPT
{
filterSize = filterSize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageViewSampleWeightCreateInfoQCOM & setNumPhases( uint32_t numPhases_ ) VULKAN_HPP_NOEXCEPT
{
numPhases = numPhases_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkImageViewSampleWeightCreateInfoQCOM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageViewSampleWeightCreateInfoQCOM*>( this );
}
operator VkImageViewSampleWeightCreateInfoQCOM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageViewSampleWeightCreateInfoQCOM*>( this );
}
operator VkImageViewSampleWeightCreateInfoQCOM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkImageViewSampleWeightCreateInfoQCOM*>( this );
}
operator VkImageViewSampleWeightCreateInfoQCOM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkImageViewSampleWeightCreateInfoQCOM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Offset2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, filterCenter, filterSize, numPhases );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImageViewSampleWeightCreateInfoQCOM const & ) const = default;
#else
bool operator==( ImageViewSampleWeightCreateInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( filterCenter == rhs.filterCenter )
&& ( filterSize == rhs.filterSize )
&& ( numPhases == rhs.numPhases );
#endif
}
bool operator!=( ImageViewSampleWeightCreateInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewSampleWeightCreateInfoQCOM;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Offset2D filterCenter = {};
VULKAN_HPP_NAMESPACE::Extent2D filterSize = {};
uint32_t numPhases = {};
};
template <>
struct CppType<StructureType, StructureType::eImageViewSampleWeightCreateInfoQCOM>
{
using Type = ImageViewSampleWeightCreateInfoQCOM;
};
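  // Illustrative usage sketch (not generated code): describing a weight image for
  // VK_QCOM_image_processing by chaining into ImageViewCreateInfo; the filter center, size and
  // phase count below are placeholder values.
  //   VULKAN_HPP_NAMESPACE::ImageViewSampleWeightCreateInfoQCOM weightInfo( { 1, 1 }, { 4, 4 }, 16 );
  //   viewCreateInfo.setPNext( &weightInfo );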
// wrapper struct for struct VkImageViewSlicedCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageViewSlicedCreateInfoEXT.html
struct ImageViewSlicedCreateInfoEXT
{
using NativeType = VkImageViewSlicedCreateInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewSlicedCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImageViewSlicedCreateInfoEXT(uint32_t sliceOffset_ = {}, uint32_t sliceCount_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, sliceOffset{ sliceOffset_ }, sliceCount{ sliceCount_ }
{}
VULKAN_HPP_CONSTEXPR ImageViewSlicedCreateInfoEXT( ImageViewSlicedCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageViewSlicedCreateInfoEXT( VkImageViewSlicedCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageViewSlicedCreateInfoEXT( *reinterpret_cast<ImageViewSlicedCreateInfoEXT const *>( &rhs ) )
{}
ImageViewSlicedCreateInfoEXT & operator=( ImageViewSlicedCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ImageViewSlicedCreateInfoEXT & operator=( VkImageViewSlicedCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewSlicedCreateInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ImageViewSlicedCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageViewSlicedCreateInfoEXT & setSliceOffset( uint32_t sliceOffset_ ) VULKAN_HPP_NOEXCEPT
{
sliceOffset = sliceOffset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageViewSlicedCreateInfoEXT & setSliceCount( uint32_t sliceCount_ ) VULKAN_HPP_NOEXCEPT
{
sliceCount = sliceCount_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkImageViewSlicedCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageViewSlicedCreateInfoEXT*>( this );
}
operator VkImageViewSlicedCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageViewSlicedCreateInfoEXT*>( this );
}
operator VkImageViewSlicedCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkImageViewSlicedCreateInfoEXT*>( this );
}
operator VkImageViewSlicedCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkImageViewSlicedCreateInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, sliceOffset, sliceCount );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImageViewSlicedCreateInfoEXT const & ) const = default;
#else
bool operator==( ImageViewSlicedCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( sliceOffset == rhs.sliceOffset )
&& ( sliceCount == rhs.sliceCount );
#endif
}
bool operator!=( ImageViewSlicedCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewSlicedCreateInfoEXT;
const void * pNext = {};
uint32_t sliceOffset = {};
uint32_t sliceCount = {};
};
template <>
struct CppType<StructureType, StructureType::eImageViewSlicedCreateInfoEXT>
{
using Type = ImageViewSlicedCreateInfoEXT;
};
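// Usage sketch (illustrative only, not generated): restrict a 3D image view to a range of slices by chaining
// this struct into vk::ImageViewCreateInfo (VK_EXT_image_sliced_view_of_3d). Passing VK_REMAINING_3D_SLICES_EXT
// as the count selects all slices from the offset onwards. `device` and `viewCreateInfo` are assumed.
//
//   vk::ImageViewSlicedCreateInfoEXT slicedInfo( /*sliceOffset*/ 4, /*sliceCount*/ VK_REMAINING_3D_SLICES_EXT );
//   viewCreateInfo.pNext = &slicedInfo;
//   vk::ImageView slicedView = device.createImageView( viewCreateInfo );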
// wrapper struct for struct VkImageViewUsageCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageViewUsageCreateInfo.html
struct ImageViewUsageCreateInfo
{
using NativeType = VkImageViewUsageCreateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewUsageCreateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImageViewUsageCreateInfo(VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, usage{ usage_ }
{}
VULKAN_HPP_CONSTEXPR ImageViewUsageCreateInfo( ImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageViewUsageCreateInfo( VkImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageViewUsageCreateInfo( *reinterpret_cast<ImageViewUsageCreateInfo const *>( &rhs ) )
{}
ImageViewUsageCreateInfo & operator=( ImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ImageViewUsageCreateInfo & operator=( VkImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ImageViewUsageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageViewUsageCreateInfo & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
{
usage = usage_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkImageViewUsageCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageViewUsageCreateInfo*>( this );
}
operator VkImageViewUsageCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageViewUsageCreateInfo*>( this );
}
operator VkImageViewUsageCreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkImageViewUsageCreateInfo*>( this );
}
operator VkImageViewUsageCreateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkImageViewUsageCreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageUsageFlags const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, usage );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImageViewUsageCreateInfo const & ) const = default;
#else
bool operator==( ImageViewUsageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( usage == rhs.usage );
#endif
}
bool operator!=( ImageViewUsageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewUsageCreateInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {};
};
template <>
struct CppType<StructureType, StructureType::eImageViewUsageCreateInfo>
{
using Type = ImageViewUsageCreateInfo;
};
using ImageViewUsageCreateInfoKHR = ImageViewUsageCreateInfo;
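// Usage sketch (illustrative only, not generated): narrow an image view's usage to a subset of the image's
// usage flags, e.g. to create a sampled-only view of a storage-capable image. `device` and `viewCreateInfo`
// are assumed from surrounding code.
//
//   vk::ImageViewUsageCreateInfo usageInfo( vk::ImageUsageFlagBits::eSampled );
//   viewCreateInfo.pNext = &usageInfo;
//   vk::ImageView sampledOnlyView = device.createImageView( viewCreateInfo );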
#if defined( VK_USE_PLATFORM_ANDROID_KHR )
// wrapper struct for struct VkImportAndroidHardwareBufferInfoANDROID, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportAndroidHardwareBufferInfoANDROID.html
struct ImportAndroidHardwareBufferInfoANDROID
{
using NativeType = VkImportAndroidHardwareBufferInfoANDROID;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportAndroidHardwareBufferInfoANDROID;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImportAndroidHardwareBufferInfoANDROID(struct AHardwareBuffer * buffer_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, buffer{ buffer_ }
{}
VULKAN_HPP_CONSTEXPR ImportAndroidHardwareBufferInfoANDROID( ImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImportAndroidHardwareBufferInfoANDROID( VkImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
: ImportAndroidHardwareBufferInfoANDROID( *reinterpret_cast<ImportAndroidHardwareBufferInfoANDROID const *>( &rhs ) )
{}
ImportAndroidHardwareBufferInfoANDROID & operator=( ImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ImportAndroidHardwareBufferInfoANDROID & operator=( VkImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ImportAndroidHardwareBufferInfoANDROID & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImportAndroidHardwareBufferInfoANDROID & setBuffer( struct AHardwareBuffer * buffer_ ) VULKAN_HPP_NOEXCEPT
{
buffer = buffer_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkImportAndroidHardwareBufferInfoANDROID const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImportAndroidHardwareBufferInfoANDROID*>( this );
}
operator VkImportAndroidHardwareBufferInfoANDROID &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImportAndroidHardwareBufferInfoANDROID*>( this );
}
operator VkImportAndroidHardwareBufferInfoANDROID const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkImportAndroidHardwareBufferInfoANDROID*>( this );
}
operator VkImportAndroidHardwareBufferInfoANDROID *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkImportAndroidHardwareBufferInfoANDROID*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, struct AHardwareBuffer * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, buffer );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImportAndroidHardwareBufferInfoANDROID const & ) const = default;
#else
bool operator==( ImportAndroidHardwareBufferInfoANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( buffer == rhs.buffer );
#endif
}
bool operator!=( ImportAndroidHardwareBufferInfoANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportAndroidHardwareBufferInfoANDROID;
const void * pNext = {};
struct AHardwareBuffer * buffer = {};
};
template <>
struct CppType<StructureType, StructureType::eImportAndroidHardwareBufferInfoANDROID>
{
using Type = ImportAndroidHardwareBufferInfoANDROID;
};
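// Usage sketch (illustrative only, not generated): import an AHardwareBuffer as device memory by chaining this
// struct into vk::MemoryAllocateInfo; the import must be a dedicated allocation, so a
// vk::MemoryDedicatedAllocateInfo naming the target image is chained as well. `ahb`, `image`, `size` and
// `memoryTypeIndex` are assumed to come from the surrounding code.
//
//   vk::ImportAndroidHardwareBufferInfoANDROID importInfo( ahb );
//   vk::MemoryDedicatedAllocateInfo dedicatedInfo( image, {}, &importInfo );
//   vk::MemoryAllocateInfo allocInfo( size, memoryTypeIndex, &dedicatedInfo );
//   vk::DeviceMemory memory = device.allocateMemory( allocInfo );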
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
// wrapper struct for struct VkImportFenceFdInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportFenceFdInfoKHR.html
struct ImportFenceFdInfoKHR
{
using NativeType = VkImportFenceFdInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportFenceFdInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImportFenceFdInfoKHR(VULKAN_HPP_NAMESPACE::Fence fence_ = {}, VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd, int fd_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, fence{ fence_ }, flags{ flags_ }, handleType{ handleType_ }, fd{ fd_ }
{}
VULKAN_HPP_CONSTEXPR ImportFenceFdInfoKHR( ImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImportFenceFdInfoKHR( VkImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: ImportFenceFdInfoKHR( *reinterpret_cast<ImportFenceFdInfoKHR const *>( &rhs ) )
{}
ImportFenceFdInfoKHR & operator=( ImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ImportFenceFdInfoKHR & operator=( VkImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
{
fence = fence_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR & setFd( int fd_ ) VULKAN_HPP_NOEXCEPT
{
fd = fd_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkImportFenceFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImportFenceFdInfoKHR*>( this );
}
operator VkImportFenceFdInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImportFenceFdInfoKHR*>( this );
}
operator VkImportFenceFdInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkImportFenceFdInfoKHR*>( this );
}
operator VkImportFenceFdInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkImportFenceFdInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Fence const &, VULKAN_HPP_NAMESPACE::FenceImportFlags const &, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits const &, int const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, fence, flags, handleType, fd );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImportFenceFdInfoKHR const & ) const = default;
#else
bool operator==( ImportFenceFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( fence == rhs.fence )
&& ( flags == rhs.flags )
&& ( handleType == rhs.handleType )
&& ( fd == rhs.fd );
#endif
}
bool operator!=( ImportFenceFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportFenceFdInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Fence fence = {};
VULKAN_HPP_NAMESPACE::FenceImportFlags flags = {};
VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
int fd = {};
};
template <>
struct CppType<StructureType, StructureType::eImportFenceFdInfoKHR>
{
using Type = ImportFenceFdInfoKHR;
};
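// Usage sketch (illustrative only, not generated): import a sync fd into an existing fence
// (VK_KHR_external_fence_fd). A sync-fd payload must be imported with the temporary flag, and ownership of
// `fd` passes to the implementation on success. `device`, `fence` and `fd` are assumed.
//
//   vk::ImportFenceFdInfoKHR importInfo( fence, vk::FenceImportFlagBits::eTemporary,
//                                        vk::ExternalFenceHandleTypeFlagBits::eSyncFd, fd );
//   device.importFenceFdKHR( importInfo );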
#if defined( VK_USE_PLATFORM_WIN32_KHR )
// wrapper struct for struct VkImportFenceWin32HandleInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportFenceWin32HandleInfoKHR.html
struct ImportFenceWin32HandleInfoKHR
{
using NativeType = VkImportFenceWin32HandleInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportFenceWin32HandleInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImportFenceWin32HandleInfoKHR(VULKAN_HPP_NAMESPACE::Fence fence_ = {}, VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd, HANDLE handle_ = {}, LPCWSTR name_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, fence{ fence_ }, flags{ flags_ }, handleType{ handleType_ }, handle{ handle_ }, name{ name_ }
{}
VULKAN_HPP_CONSTEXPR ImportFenceWin32HandleInfoKHR( ImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImportFenceWin32HandleInfoKHR( VkImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: ImportFenceWin32HandleInfoKHR( *reinterpret_cast<ImportFenceWin32HandleInfoKHR const *>( &rhs ) )
{}
ImportFenceWin32HandleInfoKHR & operator=( ImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ImportFenceWin32HandleInfoKHR & operator=( VkImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
{
fence = fence_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT
{
handle = handle_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
{
name = name_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkImportFenceWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImportFenceWin32HandleInfoKHR*>( this );
}
operator VkImportFenceWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImportFenceWin32HandleInfoKHR*>( this );
}
operator VkImportFenceWin32HandleInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkImportFenceWin32HandleInfoKHR*>( this );
}
operator VkImportFenceWin32HandleInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkImportFenceWin32HandleInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Fence const &, VULKAN_HPP_NAMESPACE::FenceImportFlags const &, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits const &, HANDLE const &, LPCWSTR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, fence, flags, handleType, handle, name );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImportFenceWin32HandleInfoKHR const & ) const = default;
#else
bool operator==( ImportFenceWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( fence == rhs.fence )
&& ( flags == rhs.flags )
&& ( handleType == rhs.handleType )
&& ( handle == rhs.handle )
&& ( name == rhs.name );
#endif
}
bool operator!=( ImportFenceWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportFenceWin32HandleInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Fence fence = {};
VULKAN_HPP_NAMESPACE::FenceImportFlags flags = {};
VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
HANDLE handle = {};
LPCWSTR name = {};
};
template <>
struct CppType<StructureType, StructureType::eImportFenceWin32HandleInfoKHR>
{
using Type = ImportFenceWin32HandleInfoKHR;
};
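// Usage sketch (illustrative only, not generated): import an NT handle into an existing fence
// (VK_KHR_external_fence_win32). At most one of `handle` and `name` may be non-null. `device`, `fence` and
// `handle` are assumed from surrounding code.
//
//   vk::ImportFenceWin32HandleInfoKHR importInfo( fence, {}, vk::ExternalFenceHandleTypeFlagBits::eOpaqueWin32,
//                                                 handle, nullptr );
//   device.importFenceWin32HandleKHR( importInfo );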
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#if defined( VK_USE_PLATFORM_FUCHSIA )
// wrapper struct for struct VkImportMemoryBufferCollectionFUCHSIA, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportMemoryBufferCollectionFUCHSIA.html
struct ImportMemoryBufferCollectionFUCHSIA
{
using NativeType = VkImportMemoryBufferCollectionFUCHSIA;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryBufferCollectionFUCHSIA;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImportMemoryBufferCollectionFUCHSIA(VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ = {}, uint32_t index_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, collection{ collection_ }, index{ index_ }
{}
VULKAN_HPP_CONSTEXPR ImportMemoryBufferCollectionFUCHSIA( ImportMemoryBufferCollectionFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImportMemoryBufferCollectionFUCHSIA( VkImportMemoryBufferCollectionFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
: ImportMemoryBufferCollectionFUCHSIA( *reinterpret_cast<ImportMemoryBufferCollectionFUCHSIA const *>( &rhs ) )
{}
ImportMemoryBufferCollectionFUCHSIA & operator=( ImportMemoryBufferCollectionFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ImportMemoryBufferCollectionFUCHSIA & operator=( VkImportMemoryBufferCollectionFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMemoryBufferCollectionFUCHSIA const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ImportMemoryBufferCollectionFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImportMemoryBufferCollectionFUCHSIA & setCollection( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ ) VULKAN_HPP_NOEXCEPT
{
collection = collection_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImportMemoryBufferCollectionFUCHSIA & setIndex( uint32_t index_ ) VULKAN_HPP_NOEXCEPT
{
index = index_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkImportMemoryBufferCollectionFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImportMemoryBufferCollectionFUCHSIA*>( this );
}
operator VkImportMemoryBufferCollectionFUCHSIA &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImportMemoryBufferCollectionFUCHSIA*>( this );
}
operator VkImportMemoryBufferCollectionFUCHSIA const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkImportMemoryBufferCollectionFUCHSIA*>( this );
}
operator VkImportMemoryBufferCollectionFUCHSIA *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkImportMemoryBufferCollectionFUCHSIA*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, collection, index );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImportMemoryBufferCollectionFUCHSIA const & ) const = default;
#else
bool operator==( ImportMemoryBufferCollectionFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( collection == rhs.collection )
&& ( index == rhs.index );
#endif
}
bool operator!=( ImportMemoryBufferCollectionFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryBufferCollectionFUCHSIA;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection = {};
uint32_t index = {};
};
template <>
struct CppType<StructureType, StructureType::eImportMemoryBufferCollectionFUCHSIA>
{
using Type = ImportMemoryBufferCollectionFUCHSIA;
};
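// Usage sketch (illustrative only, not generated): allocate memory backed by buffer `index` of a Sysmem buffer
// collection (VK_FUCHSIA_buffer_collection) by chaining this struct into vk::MemoryAllocateInfo. `collection`,
// `size` and `memoryTypeIndex` are assumed from surrounding code.
//
//   vk::ImportMemoryBufferCollectionFUCHSIA importInfo( collection, /*index*/ 0 );
//   vk::MemoryAllocateInfo allocInfo( size, memoryTypeIndex, &importInfo );
//   vk::DeviceMemory memory = device.allocateMemory( allocInfo );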
#endif /*VK_USE_PLATFORM_FUCHSIA*/
// wrapper struct for struct VkImportMemoryFdInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportMemoryFdInfoKHR.html
struct ImportMemoryFdInfoKHR
{
using NativeType = VkImportMemoryFdInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryFdInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImportMemoryFdInfoKHR(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, int fd_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, handleType{ handleType_ }, fd{ fd_ }
{}
VULKAN_HPP_CONSTEXPR ImportMemoryFdInfoKHR( ImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImportMemoryFdInfoKHR( VkImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: ImportMemoryFdInfoKHR( *reinterpret_cast<ImportMemoryFdInfoKHR const *>( &rhs ) )
{}
ImportMemoryFdInfoKHR & operator=( ImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ImportMemoryFdInfoKHR & operator=( VkImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMemoryFdInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ImportMemoryFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImportMemoryFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImportMemoryFdInfoKHR & setFd( int fd_ ) VULKAN_HPP_NOEXCEPT
{
fd = fd_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkImportMemoryFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImportMemoryFdInfoKHR*>( this );
}
operator VkImportMemoryFdInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImportMemoryFdInfoKHR*>( this );
}
operator VkImportMemoryFdInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkImportMemoryFdInfoKHR*>( this );
}
operator VkImportMemoryFdInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkImportMemoryFdInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits const &, int const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, handleType, fd );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImportMemoryFdInfoKHR const & ) const = default;
#else
bool operator==( ImportMemoryFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( handleType == rhs.handleType )
&& ( fd == rhs.fd );
#endif
}
bool operator!=( ImportMemoryFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryFdInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
int fd = {};
};
template <>
struct CppType<StructureType, StructureType::eImportMemoryFdInfoKHR>
{
using Type = ImportMemoryFdInfoKHR;
};
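// Usage sketch (illustrative only, not generated): import a POSIX file descriptor exported from another Vulkan
// instance or API as device memory (VK_KHR_external_memory_fd). On success the implementation takes ownership
// of `fd`; `size` and `memoryTypeIndex` must match the exporting allocation and are assumed here.
//
//   vk::ImportMemoryFdInfoKHR importInfo( vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, fd );
//   vk::MemoryAllocateInfo allocInfo( size, memoryTypeIndex, &importInfo );
//   vk::DeviceMemory memory = device.allocateMemory( allocInfo );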
// wrapper struct for struct VkImportMemoryHostPointerInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportMemoryHostPointerInfoEXT.html
struct ImportMemoryHostPointerInfoEXT
{
using NativeType = VkImportMemoryHostPointerInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryHostPointerInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImportMemoryHostPointerInfoEXT(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, void * pHostPointer_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, handleType{ handleType_ }, pHostPointer{ pHostPointer_ }
{}
VULKAN_HPP_CONSTEXPR ImportMemoryHostPointerInfoEXT( ImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImportMemoryHostPointerInfoEXT( VkImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: ImportMemoryHostPointerInfoEXT( *reinterpret_cast<ImportMemoryHostPointerInfoEXT const *>( &rhs ) )
{}
ImportMemoryHostPointerInfoEXT & operator=( ImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ImportMemoryHostPointerInfoEXT & operator=( VkImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMemoryHostPointerInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ImportMemoryHostPointerInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImportMemoryHostPointerInfoEXT & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImportMemoryHostPointerInfoEXT & setPHostPointer( void * pHostPointer_ ) VULKAN_HPP_NOEXCEPT
{
pHostPointer = pHostPointer_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkImportMemoryHostPointerInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImportMemoryHostPointerInfoEXT*>( this );
}
operator VkImportMemoryHostPointerInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImportMemoryHostPointerInfoEXT*>( this );
}
operator VkImportMemoryHostPointerInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkImportMemoryHostPointerInfoEXT*>( this );
}
operator VkImportMemoryHostPointerInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkImportMemoryHostPointerInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits const &, void * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, handleType, pHostPointer );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImportMemoryHostPointerInfoEXT const & ) const = default;
#else
bool operator==( ImportMemoryHostPointerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( handleType == rhs.handleType )
&& ( pHostPointer == rhs.pHostPointer );
#endif
}
bool operator!=( ImportMemoryHostPointerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryHostPointerInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
void * pHostPointer = {};
};
template <>
struct CppType<StructureType, StructureType::eImportMemoryHostPointerInfoEXT>
{
using Type = ImportMemoryHostPointerInfoEXT;
};
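// Usage sketch (illustrative only, not generated): wrap host-allocated memory as a vk::DeviceMemory
// (VK_EXT_external_memory_host). `hostPtr` and `size` must be aligned to
// VkPhysicalDeviceExternalMemoryHostPropertiesEXT::minImportedHostPointerAlignment; both are assumed here.
//
//   vk::ImportMemoryHostPointerInfoEXT importInfo( vk::ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT, hostPtr );
//   vk::MemoryAllocateInfo allocInfo( size, memoryTypeIndex, &importInfo );
//   vk::DeviceMemory memory = device.allocateMemory( allocInfo );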
#if defined( VK_USE_PLATFORM_METAL_EXT )
// wrapper struct for struct VkImportMemoryMetalHandleInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportMemoryMetalHandleInfoEXT.html
struct ImportMemoryMetalHandleInfoEXT
{
using NativeType = VkImportMemoryMetalHandleInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryMetalHandleInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImportMemoryMetalHandleInfoEXT(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, void * handle_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, handleType{ handleType_ }, handle{ handle_ }
{}
VULKAN_HPP_CONSTEXPR ImportMemoryMetalHandleInfoEXT( ImportMemoryMetalHandleInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImportMemoryMetalHandleInfoEXT( VkImportMemoryMetalHandleInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: ImportMemoryMetalHandleInfoEXT( *reinterpret_cast<ImportMemoryMetalHandleInfoEXT const *>( &rhs ) )
{}
ImportMemoryMetalHandleInfoEXT & operator=( ImportMemoryMetalHandleInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ImportMemoryMetalHandleInfoEXT & operator=( VkImportMemoryMetalHandleInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMemoryMetalHandleInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ImportMemoryMetalHandleInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImportMemoryMetalHandleInfoEXT & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImportMemoryMetalHandleInfoEXT & setHandle( void * handle_ ) VULKAN_HPP_NOEXCEPT
{
handle = handle_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkImportMemoryMetalHandleInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImportMemoryMetalHandleInfoEXT*>( this );
}
operator VkImportMemoryMetalHandleInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImportMemoryMetalHandleInfoEXT*>( this );
}
operator VkImportMemoryMetalHandleInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkImportMemoryMetalHandleInfoEXT*>( this );
}
operator VkImportMemoryMetalHandleInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkImportMemoryMetalHandleInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits const &, void * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, handleType, handle );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImportMemoryMetalHandleInfoEXT const & ) const = default;
#else
bool operator==( ImportMemoryMetalHandleInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( handleType == rhs.handleType )
&& ( handle == rhs.handle );
#endif
}
bool operator!=( ImportMemoryMetalHandleInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryMetalHandleInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
void * handle = {};
};
template <>
struct CppType<StructureType, StructureType::eImportMemoryMetalHandleInfoEXT>
{
using Type = ImportMemoryMetalHandleInfoEXT;
};
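// Usage sketch (illustrative only, not generated): import an existing Metal object as device memory
// (VK_EXT_external_memory_metal). The handle type selects how `handle` is interpreted, e.g.
// VK_EXTERNAL_MEMORY_HANDLE_TYPE_MTLBUFFER_BIT_EXT for an id<MTLBuffer>. `metalHandleType`, `mtlObjectHandle`,
// `size` and `memoryTypeIndex` are assumed from surrounding code.
//
//   vk::ImportMemoryMetalHandleInfoEXT importInfo( metalHandleType, mtlObjectHandle );
//   vk::MemoryAllocateInfo allocInfo( size, memoryTypeIndex, &importInfo );
//   vk::DeviceMemory memory = device.allocateMemory( allocInfo );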
#endif /*VK_USE_PLATFORM_METAL_EXT*/
#if defined( VK_USE_PLATFORM_WIN32_KHR )
// wrapper struct for struct VkImportMemoryWin32HandleInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportMemoryWin32HandleInfoKHR.html
struct ImportMemoryWin32HandleInfoKHR
{
using NativeType = VkImportMemoryWin32HandleInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryWin32HandleInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoKHR(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, HANDLE handle_ = {}, LPCWSTR name_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, handleType{ handleType_ }, handle{ handle_ }, name{ name_ }
{}
VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoKHR( ImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImportMemoryWin32HandleInfoKHR( VkImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: ImportMemoryWin32HandleInfoKHR( *reinterpret_cast<ImportMemoryWin32HandleInfoKHR const *>( &rhs ) )
{}
ImportMemoryWin32HandleInfoKHR & operator=( ImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ImportMemoryWin32HandleInfoKHR & operator=( VkImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoKHR & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT
{
handle = handle_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
{
name = name_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkImportMemoryWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImportMemoryWin32HandleInfoKHR*>( this );
}
operator VkImportMemoryWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImportMemoryWin32HandleInfoKHR*>( this );
}
operator VkImportMemoryWin32HandleInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkImportMemoryWin32HandleInfoKHR*>( this );
}
operator VkImportMemoryWin32HandleInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkImportMemoryWin32HandleInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits const &, HANDLE const &, LPCWSTR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, handleType, handle, name );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImportMemoryWin32HandleInfoKHR const & ) const = default;
#else
bool operator==( ImportMemoryWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( handleType == rhs.handleType )
&& ( handle == rhs.handle )
&& ( name == rhs.name );
#endif
}
bool operator!=( ImportMemoryWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryWin32HandleInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
HANDLE handle = {};
LPCWSTR name = {};
};
template <>
struct CppType<StructureType, StructureType::eImportMemoryWin32HandleInfoKHR>
{
using Type = ImportMemoryWin32HandleInfoKHR;
};
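// Usage sketch (illustrative only, not generated): import an NT handle (or a named mapping) as device memory
// (VK_KHR_external_memory_win32). At most one of `handle` and `name` may be non-null; an imported NT handle is
// not consumed, so the application still closes it. `handle`, `size` and `memoryTypeIndex` are assumed.
//
//   vk::ImportMemoryWin32HandleInfoKHR importInfo( vk::ExternalMemoryHandleTypeFlagBits::eOpaqueWin32, handle, nullptr );
//   vk::MemoryAllocateInfo allocInfo( size, memoryTypeIndex, &importInfo );
//   vk::DeviceMemory memory = device.allocateMemory( allocInfo );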
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#if defined( VK_USE_PLATFORM_WIN32_KHR )
// wrapper struct for struct VkImportMemoryWin32HandleInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportMemoryWin32HandleInfoNV.html
struct ImportMemoryWin32HandleInfoNV
{
using NativeType = VkImportMemoryWin32HandleInfoNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryWin32HandleInfoNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoNV(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType_ = {}, HANDLE handle_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, handleType{ handleType_ }, handle{ handle_ }
{}
VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoNV( ImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImportMemoryWin32HandleInfoNV( VkImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: ImportMemoryWin32HandleInfoNV( *reinterpret_cast<ImportMemoryWin32HandleInfoNV const *>( &rhs ) )
{}
ImportMemoryWin32HandleInfoNV & operator=( ImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ImportMemoryWin32HandleInfoNV & operator=( VkImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoNV & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoNV & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT
{
handle = handle_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkImportMemoryWin32HandleInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImportMemoryWin32HandleInfoNV*>( this );
}
operator VkImportMemoryWin32HandleInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImportMemoryWin32HandleInfoNV*>( this );
}
operator VkImportMemoryWin32HandleInfoNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkImportMemoryWin32HandleInfoNV*>( this );
}
operator VkImportMemoryWin32HandleInfoNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkImportMemoryWin32HandleInfoNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV const &, HANDLE const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, handleType, handle );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImportMemoryWin32HandleInfoNV const & ) const = default;
#else
bool operator==( ImportMemoryWin32HandleInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( handleType == rhs.handleType )
&& ( handle == rhs.handle );
#endif
}
bool operator!=( ImportMemoryWin32HandleInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryWin32HandleInfoNV;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType = {};
HANDLE handle = {};
};
template <>
struct CppType<StructureType, StructureType::eImportMemoryWin32HandleInfoNV>
{
using Type = ImportMemoryWin32HandleInfoNV;
};
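// Usage sketch (illustrative only, not generated): legacy NVIDIA-specific variant of the Win32 memory import
// above (VK_NV_external_memory_win32); prefer ImportMemoryWin32HandleInfoKHR where available. `handle`, `size`
// and `memoryTypeIndex` are assumed from surrounding code.
//
//   vk::ImportMemoryWin32HandleInfoNV importInfo( vk::ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32, handle );
//   vk::MemoryAllocateInfo allocInfo( size, memoryTypeIndex, &importInfo );
//   vk::DeviceMemory memory = device.allocateMemory( allocInfo );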
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#if defined( VK_USE_PLATFORM_FUCHSIA )
// wrapper struct for struct VkImportMemoryZirconHandleInfoFUCHSIA, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportMemoryZirconHandleInfoFUCHSIA.html
struct ImportMemoryZirconHandleInfoFUCHSIA
{
using NativeType = VkImportMemoryZirconHandleInfoFUCHSIA;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryZirconHandleInfoFUCHSIA;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImportMemoryZirconHandleInfoFUCHSIA(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, zx_handle_t handle_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, handleType{ handleType_ }, handle{ handle_ }
{}
VULKAN_HPP_CONSTEXPR ImportMemoryZirconHandleInfoFUCHSIA( ImportMemoryZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImportMemoryZirconHandleInfoFUCHSIA( VkImportMemoryZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
: ImportMemoryZirconHandleInfoFUCHSIA( *reinterpret_cast<ImportMemoryZirconHandleInfoFUCHSIA const *>( &rhs ) )
{}
ImportMemoryZirconHandleInfoFUCHSIA & operator=( ImportMemoryZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ImportMemoryZirconHandleInfoFUCHSIA & operator=( VkImportMemoryZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMemoryZirconHandleInfoFUCHSIA const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ImportMemoryZirconHandleInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImportMemoryZirconHandleInfoFUCHSIA & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImportMemoryZirconHandleInfoFUCHSIA & setHandle( zx_handle_t handle_ ) VULKAN_HPP_NOEXCEPT
{
handle = handle_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkImportMemoryZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImportMemoryZirconHandleInfoFUCHSIA*>( this );
}
operator VkImportMemoryZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImportMemoryZirconHandleInfoFUCHSIA*>( this );
}
operator VkImportMemoryZirconHandleInfoFUCHSIA const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkImportMemoryZirconHandleInfoFUCHSIA*>( this );
}
operator VkImportMemoryZirconHandleInfoFUCHSIA *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkImportMemoryZirconHandleInfoFUCHSIA*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits const &, zx_handle_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, handleType, handle );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImportMemoryZirconHandleInfoFUCHSIA const & ) const = default;
#else
bool operator==( ImportMemoryZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( handleType == rhs.handleType )
&& ( handle == rhs.handle );
#endif
}
bool operator!=( ImportMemoryZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryZirconHandleInfoFUCHSIA;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
zx_handle_t handle = {};
};
template <>
struct CppType<StructureType, StructureType::eImportMemoryZirconHandleInfoFUCHSIA>
{
using Type = ImportMemoryZirconHandleInfoFUCHSIA;
};
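// Usage sketch (illustrative only, not generated): import a Zircon VMO as device memory
// (VK_FUCHSIA_external_memory). Ownership of `vmoHandle` passes to the implementation on success; `size` and
// `memoryTypeIndex` are assumed from surrounding code.
//
//   vk::ImportMemoryZirconHandleInfoFUCHSIA importInfo( vk::ExternalMemoryHandleTypeFlagBits::eZirconVmoFUCHSIA, vmoHandle );
//   vk::MemoryAllocateInfo allocInfo( size, memoryTypeIndex, &importInfo );
//   vk::DeviceMemory memory = device.allocateMemory( allocInfo );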
#endif /*VK_USE_PLATFORM_FUCHSIA*/
#if defined( VK_USE_PLATFORM_METAL_EXT )
// wrapper struct for struct VkImportMetalBufferInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportMetalBufferInfoEXT.html
struct ImportMetalBufferInfoEXT
{
using NativeType = VkImportMetalBufferInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMetalBufferInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImportMetalBufferInfoEXT(MTLBuffer_id mtlBuffer_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, mtlBuffer{ mtlBuffer_ }
{}
VULKAN_HPP_CONSTEXPR ImportMetalBufferInfoEXT( ImportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImportMetalBufferInfoEXT( VkImportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: ImportMetalBufferInfoEXT( *reinterpret_cast<ImportMetalBufferInfoEXT const *>( &rhs ) )
{}
ImportMetalBufferInfoEXT & operator=( ImportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ImportMetalBufferInfoEXT & operator=( VkImportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMetalBufferInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ImportMetalBufferInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImportMetalBufferInfoEXT & setMtlBuffer( MTLBuffer_id mtlBuffer_ ) VULKAN_HPP_NOEXCEPT
{
mtlBuffer = mtlBuffer_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkImportMetalBufferInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImportMetalBufferInfoEXT*>( this );
}
operator VkImportMetalBufferInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImportMetalBufferInfoEXT*>( this );
}
operator VkImportMetalBufferInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkImportMetalBufferInfoEXT*>( this );
}
operator VkImportMetalBufferInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkImportMetalBufferInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, MTLBuffer_id const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, mtlBuffer );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImportMetalBufferInfoEXT const & ) const = default;
#else
bool operator==( ImportMetalBufferInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( mtlBuffer == rhs.mtlBuffer );
#endif
}
bool operator!=( ImportMetalBufferInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMetalBufferInfoEXT;
const void * pNext = {};
MTLBuffer_id mtlBuffer = {};
};
template <>
struct CppType<StructureType, StructureType::eImportMetalBufferInfoEXT>
{
using Type = ImportMetalBufferInfoEXT;
};
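// Usage sketch (illustrative only, not generated): wrap an existing id<MTLBuffer> as device memory
// (VK_EXT_metal_objects) by chaining this struct into vk::MemoryAllocateInfo. `mtlBuffer`, `size` and
// `memoryTypeIndex` are assumed from surrounding code.
//
//   vk::ImportMetalBufferInfoEXT importInfo( mtlBuffer );
//   vk::MemoryAllocateInfo allocInfo( size, memoryTypeIndex, &importInfo );
//   vk::DeviceMemory memory = device.allocateMemory( allocInfo );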
#endif /*VK_USE_PLATFORM_METAL_EXT*/
#if defined( VK_USE_PLATFORM_METAL_EXT )
// wrapper struct for struct VkImportMetalIOSurfaceInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportMetalIOSurfaceInfoEXT.html
struct ImportMetalIOSurfaceInfoEXT
{
using NativeType = VkImportMetalIOSurfaceInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMetalIoSurfaceInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImportMetalIOSurfaceInfoEXT(IOSurfaceRef ioSurface_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, ioSurface{ ioSurface_ }
{}
VULKAN_HPP_CONSTEXPR ImportMetalIOSurfaceInfoEXT( ImportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImportMetalIOSurfaceInfoEXT( VkImportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: ImportMetalIOSurfaceInfoEXT( *reinterpret_cast<ImportMetalIOSurfaceInfoEXT const *>( &rhs ) )
{}
ImportMetalIOSurfaceInfoEXT & operator=( ImportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ImportMetalIOSurfaceInfoEXT & operator=( VkImportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMetalIOSurfaceInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ImportMetalIOSurfaceInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImportMetalIOSurfaceInfoEXT & setIoSurface( IOSurfaceRef ioSurface_ ) VULKAN_HPP_NOEXCEPT
{
ioSurface = ioSurface_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkImportMetalIOSurfaceInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImportMetalIOSurfaceInfoEXT*>( this );
}
operator VkImportMetalIOSurfaceInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImportMetalIOSurfaceInfoEXT*>( this );
}
operator VkImportMetalIOSurfaceInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkImportMetalIOSurfaceInfoEXT*>( this );
}
operator VkImportMetalIOSurfaceInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkImportMetalIOSurfaceInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, IOSurfaceRef const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, ioSurface );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImportMetalIOSurfaceInfoEXT const & ) const = default;
#else
bool operator==( ImportMetalIOSurfaceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( ioSurface == rhs.ioSurface );
#endif
}
bool operator!=( ImportMetalIOSurfaceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMetalIoSurfaceInfoEXT;
const void * pNext = {};
IOSurfaceRef ioSurface = {};
};
template <>
struct CppType<StructureType, StructureType::eImportMetalIoSurfaceInfoEXT>
{
using Type = ImportMetalIOSurfaceInfoEXT;
};
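// Usage sketch (illustrative only, not generated): create an image backed by an existing IOSurface
// (VK_EXT_metal_objects) by chaining this struct into vk::ImageCreateInfo. `device`, `ioSurface` and a
// pre-filled `imageCreateInfo` are assumed from surrounding code.
//
//   vk::ImportMetalIOSurfaceInfoEXT importInfo( ioSurface );
//   imageCreateInfo.pNext = &importInfo;
//   vk::Image image = device.createImage( imageCreateInfo );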
#endif /*VK_USE_PLATFORM_METAL_EXT*/
#if defined( VK_USE_PLATFORM_METAL_EXT )
// wrapper struct for struct VkImportMetalSharedEventInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportMetalSharedEventInfoEXT.html
struct ImportMetalSharedEventInfoEXT
{
using NativeType = VkImportMetalSharedEventInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMetalSharedEventInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImportMetalSharedEventInfoEXT(MTLSharedEvent_id mtlSharedEvent_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, mtlSharedEvent{ mtlSharedEvent_ }
{}
VULKAN_HPP_CONSTEXPR ImportMetalSharedEventInfoEXT( ImportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImportMetalSharedEventInfoEXT( VkImportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: ImportMetalSharedEventInfoEXT( *reinterpret_cast<ImportMetalSharedEventInfoEXT const *>( &rhs ) )
{}
ImportMetalSharedEventInfoEXT & operator=( ImportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ImportMetalSharedEventInfoEXT & operator=( VkImportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMetalSharedEventInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ImportMetalSharedEventInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImportMetalSharedEventInfoEXT & setMtlSharedEvent( MTLSharedEvent_id mtlSharedEvent_ ) VULKAN_HPP_NOEXCEPT
{
mtlSharedEvent = mtlSharedEvent_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkImportMetalSharedEventInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImportMetalSharedEventInfoEXT*>( this );
}
operator VkImportMetalSharedEventInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImportMetalSharedEventInfoEXT*>( this );
}
operator VkImportMetalSharedEventInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkImportMetalSharedEventInfoEXT*>( this );
}
operator VkImportMetalSharedEventInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkImportMetalSharedEventInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, MTLSharedEvent_id const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, mtlSharedEvent );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImportMetalSharedEventInfoEXT const & ) const = default;
#else
bool operator==( ImportMetalSharedEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( mtlSharedEvent == rhs.mtlSharedEvent );
#endif
}
bool operator!=( ImportMetalSharedEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMetalSharedEventInfoEXT;
const void * pNext = {};
MTLSharedEvent_id mtlSharedEvent = {};
};
template <>
struct CppType<StructureType, StructureType::eImportMetalSharedEventInfoEXT>
{
using Type = ImportMetalSharedEventInfoEXT;
};
#endif /*VK_USE_PLATFORM_METAL_EXT*/
#if defined( VK_USE_PLATFORM_METAL_EXT )
// wrapper struct for struct VkImportMetalTextureInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportMetalTextureInfoEXT.html
struct ImportMetalTextureInfoEXT
{
using NativeType = VkImportMetalTextureInfoEXT;
static const bool allowDuplicate = true;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMetalTextureInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImportMetalTextureInfoEXT(VULKAN_HPP_NAMESPACE::ImageAspectFlagBits plane_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor, MTLTexture_id mtlTexture_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, plane{ plane_ }, mtlTexture{ mtlTexture_ }
{}
VULKAN_HPP_CONSTEXPR ImportMetalTextureInfoEXT( ImportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImportMetalTextureInfoEXT( VkImportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: ImportMetalTextureInfoEXT( *reinterpret_cast<ImportMetalTextureInfoEXT const *>( &rhs ) )
{}
ImportMetalTextureInfoEXT & operator=( ImportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ImportMetalTextureInfoEXT & operator=( VkImportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMetalTextureInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ImportMetalTextureInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImportMetalTextureInfoEXT & setPlane( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits plane_ ) VULKAN_HPP_NOEXCEPT
{
plane = plane_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImportMetalTextureInfoEXT & setMtlTexture( MTLTexture_id mtlTexture_ ) VULKAN_HPP_NOEXCEPT
{
mtlTexture = mtlTexture_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkImportMetalTextureInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImportMetalTextureInfoEXT*>( this );
}
operator VkImportMetalTextureInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImportMetalTextureInfoEXT*>( this );
}
operator VkImportMetalTextureInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkImportMetalTextureInfoEXT*>( this );
}
operator VkImportMetalTextureInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkImportMetalTextureInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageAspectFlagBits const &, MTLTexture_id const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, plane, mtlTexture );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImportMetalTextureInfoEXT const & ) const = default;
#else
bool operator==( ImportMetalTextureInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( plane == rhs.plane )
&& ( mtlTexture == rhs.mtlTexture );
#endif
}
bool operator!=( ImportMetalTextureInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMetalTextureInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ImageAspectFlagBits plane = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor;
MTLTexture_id mtlTexture = {};
};
template <>
struct CppType<StructureType, StructureType::eImportMetalTextureInfoEXT>
{
using Type = ImportMetalTextureInfoEXT;
};
#endif /*VK_USE_PLATFORM_METAL_EXT*/
#if defined( VK_USE_PLATFORM_SCREEN_QNX )
// wrapper struct for struct VkImportScreenBufferInfoQNX, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportScreenBufferInfoQNX.html
struct ImportScreenBufferInfoQNX
{
using NativeType = VkImportScreenBufferInfoQNX;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportScreenBufferInfoQNX;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImportScreenBufferInfoQNX(struct _screen_buffer * buffer_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, buffer{ buffer_ }
{}
VULKAN_HPP_CONSTEXPR ImportScreenBufferInfoQNX( ImportScreenBufferInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImportScreenBufferInfoQNX( VkImportScreenBufferInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT
: ImportScreenBufferInfoQNX( *reinterpret_cast<ImportScreenBufferInfoQNX const *>( &rhs ) )
{}
ImportScreenBufferInfoQNX & operator=( ImportScreenBufferInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ImportScreenBufferInfoQNX & operator=( VkImportScreenBufferInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportScreenBufferInfoQNX const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ImportScreenBufferInfoQNX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImportScreenBufferInfoQNX & setBuffer( struct _screen_buffer * buffer_ ) VULKAN_HPP_NOEXCEPT
{
buffer = buffer_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkImportScreenBufferInfoQNX const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImportScreenBufferInfoQNX*>( this );
}
operator VkImportScreenBufferInfoQNX &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImportScreenBufferInfoQNX*>( this );
}
operator VkImportScreenBufferInfoQNX const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkImportScreenBufferInfoQNX*>( this );
}
operator VkImportScreenBufferInfoQNX *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkImportScreenBufferInfoQNX*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, struct _screen_buffer * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, buffer );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImportScreenBufferInfoQNX const & ) const = default;
#else
bool operator==( ImportScreenBufferInfoQNX const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( buffer == rhs.buffer );
#endif
}
bool operator!=( ImportScreenBufferInfoQNX const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportScreenBufferInfoQNX;
const void * pNext = {};
struct _screen_buffer * buffer = {};
};
template <>
struct CppType<StructureType, StructureType::eImportScreenBufferInfoQNX>
{
using Type = ImportScreenBufferInfoQNX;
};
#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
// wrapper struct for struct VkImportSemaphoreFdInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportSemaphoreFdInfoKHR.html
struct ImportSemaphoreFdInfoKHR
{
using NativeType = VkImportSemaphoreFdInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportSemaphoreFdInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImportSemaphoreFdInfoKHR(VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, int fd_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, semaphore{ semaphore_ }, flags{ flags_ }, handleType{ handleType_ }, fd{ fd_ }
{}
VULKAN_HPP_CONSTEXPR ImportSemaphoreFdInfoKHR( ImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImportSemaphoreFdInfoKHR( VkImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: ImportSemaphoreFdInfoKHR( *reinterpret_cast<ImportSemaphoreFdInfoKHR const *>( &rhs ) )
{}
ImportSemaphoreFdInfoKHR & operator=( ImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ImportSemaphoreFdInfoKHR & operator=( VkImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
{
semaphore = semaphore_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR & setFd( int fd_ ) VULKAN_HPP_NOEXCEPT
{
fd = fd_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkImportSemaphoreFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImportSemaphoreFdInfoKHR*>( this );
}
operator VkImportSemaphoreFdInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImportSemaphoreFdInfoKHR*>( this );
}
operator VkImportSemaphoreFdInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkImportSemaphoreFdInfoKHR*>( this );
}
operator VkImportSemaphoreFdInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkImportSemaphoreFdInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Semaphore const &, VULKAN_HPP_NAMESPACE::SemaphoreImportFlags const &, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits const &, int const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, semaphore, flags, handleType, fd );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImportSemaphoreFdInfoKHR const & ) const = default;
#else
bool operator==( ImportSemaphoreFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( semaphore == rhs.semaphore )
&& ( flags == rhs.flags )
&& ( handleType == rhs.handleType )
&& ( fd == rhs.fd );
#endif
}
bool operator!=( ImportSemaphoreFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreFdInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags = {};
VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
int fd = {};
};
template <>
struct CppType<StructureType, StructureType::eImportSemaphoreFdInfoKHR>
{
using Type = ImportSemaphoreFdInfoKHR;
};
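// Usage sketch (illustrative, not part of the generated header): importing a POSIX file
// descriptor payload into an existing semaphore via the fluent setters. `device`, `semaphore`,
// and `fd` are hypothetical, previously obtained handles; note that a successfully imported
// opaque fd is owned by the implementation afterwards.
//   vk::ImportSemaphoreFdInfoKHR importInfo{};
//   importInfo.setSemaphore( semaphore )
//             .setHandleType( vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd )
//             .setFd( fd );
//   device.importSemaphoreFdKHR( importInfo );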
#if defined( VK_USE_PLATFORM_WIN32_KHR )
// wrapper struct for struct VkImportSemaphoreWin32HandleInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportSemaphoreWin32HandleInfoKHR.html
struct ImportSemaphoreWin32HandleInfoKHR
{
using NativeType = VkImportSemaphoreWin32HandleInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportSemaphoreWin32HandleInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImportSemaphoreWin32HandleInfoKHR(VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, HANDLE handle_ = {}, LPCWSTR name_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, semaphore{ semaphore_ }, flags{ flags_ }, handleType{ handleType_ }, handle{ handle_ }, name{ name_ }
{}
VULKAN_HPP_CONSTEXPR ImportSemaphoreWin32HandleInfoKHR( ImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImportSemaphoreWin32HandleInfoKHR( VkImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: ImportSemaphoreWin32HandleInfoKHR( *reinterpret_cast<ImportSemaphoreWin32HandleInfoKHR const *>( &rhs ) )
{}
ImportSemaphoreWin32HandleInfoKHR & operator=( ImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ImportSemaphoreWin32HandleInfoKHR & operator=( VkImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
{
semaphore = semaphore_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT
{
handle = handle_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
{
name = name_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkImportSemaphoreWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR*>( this );
}
operator VkImportSemaphoreWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImportSemaphoreWin32HandleInfoKHR*>( this );
}
operator VkImportSemaphoreWin32HandleInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR*>( this );
}
operator VkImportSemaphoreWin32HandleInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkImportSemaphoreWin32HandleInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Semaphore const &, VULKAN_HPP_NAMESPACE::SemaphoreImportFlags const &, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits const &, HANDLE const &, LPCWSTR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, semaphore, flags, handleType, handle, name );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImportSemaphoreWin32HandleInfoKHR const & ) const = default;
#else
bool operator==( ImportSemaphoreWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( semaphore == rhs.semaphore )
&& ( flags == rhs.flags )
&& ( handleType == rhs.handleType )
&& ( handle == rhs.handle )
&& ( name == rhs.name );
#endif
}
bool operator!=( ImportSemaphoreWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreWin32HandleInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags = {};
VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
HANDLE handle = {};
LPCWSTR name = {};
};
template <>
struct CppType<StructureType, StructureType::eImportSemaphoreWin32HandleInfoKHR>
{
using Type = ImportSemaphoreWin32HandleInfoKHR;
};
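// Usage sketch (illustrative, not part of the generated header): per the registry documentation,
// at most one of `handle` and `name` should identify the payload; the other is left empty.
// `device`, `semaphore`, and `win32Handle` are hypothetical, previously obtained handles.
//   vk::ImportSemaphoreWin32HandleInfoKHR importInfo{};
//   importInfo.setSemaphore( semaphore )
//             .setHandleType( vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32 )
//             .setHandle( win32Handle );
//   device.importSemaphoreWin32HandleKHR( importInfo );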
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#if defined( VK_USE_PLATFORM_FUCHSIA )
// wrapper struct for struct VkImportSemaphoreZirconHandleInfoFUCHSIA, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportSemaphoreZirconHandleInfoFUCHSIA.html
struct ImportSemaphoreZirconHandleInfoFUCHSIA
{
using NativeType = VkImportSemaphoreZirconHandleInfoFUCHSIA;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportSemaphoreZirconHandleInfoFUCHSIA;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImportSemaphoreZirconHandleInfoFUCHSIA(VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, zx_handle_t zirconHandle_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, semaphore{ semaphore_ }, flags{ flags_ }, handleType{ handleType_ }, zirconHandle{ zirconHandle_ }
{}
VULKAN_HPP_CONSTEXPR ImportSemaphoreZirconHandleInfoFUCHSIA( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImportSemaphoreZirconHandleInfoFUCHSIA( VkImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
: ImportSemaphoreZirconHandleInfoFUCHSIA( *reinterpret_cast<ImportSemaphoreZirconHandleInfoFUCHSIA const *>( &rhs ) )
{}
ImportSemaphoreZirconHandleInfoFUCHSIA & operator=( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ImportSemaphoreZirconHandleInfoFUCHSIA & operator=( VkImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
{
semaphore = semaphore_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA & setZirconHandle( zx_handle_t zirconHandle_ ) VULKAN_HPP_NOEXCEPT
{
zirconHandle = zirconHandle_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkImportSemaphoreZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImportSemaphoreZirconHandleInfoFUCHSIA*>( this );
}
operator VkImportSemaphoreZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImportSemaphoreZirconHandleInfoFUCHSIA*>( this );
}
operator VkImportSemaphoreZirconHandleInfoFUCHSIA const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkImportSemaphoreZirconHandleInfoFUCHSIA*>( this );
}
operator VkImportSemaphoreZirconHandleInfoFUCHSIA *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkImportSemaphoreZirconHandleInfoFUCHSIA*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Semaphore const &, VULKAN_HPP_NAMESPACE::SemaphoreImportFlags const &, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits const &, zx_handle_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, semaphore, flags, handleType, zirconHandle );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImportSemaphoreZirconHandleInfoFUCHSIA const & ) const = default;
#else
bool operator==( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( semaphore == rhs.semaphore )
&& ( flags == rhs.flags )
&& ( handleType == rhs.handleType )
&& ( zirconHandle == rhs.zirconHandle );
#endif
}
bool operator!=( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreZirconHandleInfoFUCHSIA;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags = {};
VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
zx_handle_t zirconHandle = {};
};
template <>
struct CppType<StructureType, StructureType::eImportSemaphoreZirconHandleInfoFUCHSIA>
{
using Type = ImportSemaphoreZirconHandleInfoFUCHSIA;
};
#endif /*VK_USE_PLATFORM_FUCHSIA*/
// wrapper struct for struct VkIndirectCommandsExecutionSetTokenEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectCommandsExecutionSetTokenEXT.html
struct IndirectCommandsExecutionSetTokenEXT
{
using NativeType = VkIndirectCommandsExecutionSetTokenEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR IndirectCommandsExecutionSetTokenEXT(VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoTypeEXT type_ = VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoTypeEXT::ePipelines, VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStages_ = {}) VULKAN_HPP_NOEXCEPT
: type{ type_ }, shaderStages{ shaderStages_ }
{}
VULKAN_HPP_CONSTEXPR IndirectCommandsExecutionSetTokenEXT( IndirectCommandsExecutionSetTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
IndirectCommandsExecutionSetTokenEXT( VkIndirectCommandsExecutionSetTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: IndirectCommandsExecutionSetTokenEXT( *reinterpret_cast<IndirectCommandsExecutionSetTokenEXT const *>( &rhs ) )
{}
IndirectCommandsExecutionSetTokenEXT & operator=( IndirectCommandsExecutionSetTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
IndirectCommandsExecutionSetTokenEXT & operator=( VkIndirectCommandsExecutionSetTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IndirectCommandsExecutionSetTokenEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsExecutionSetTokenEXT & setType( VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoTypeEXT type_ ) VULKAN_HPP_NOEXCEPT
{
type = type_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsExecutionSetTokenEXT & setShaderStages( VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStages_ ) VULKAN_HPP_NOEXCEPT
{
shaderStages = shaderStages_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkIndirectCommandsExecutionSetTokenEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkIndirectCommandsExecutionSetTokenEXT*>( this );
}
operator VkIndirectCommandsExecutionSetTokenEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkIndirectCommandsExecutionSetTokenEXT*>( this );
}
operator VkIndirectCommandsExecutionSetTokenEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkIndirectCommandsExecutionSetTokenEXT*>( this );
}
operator VkIndirectCommandsExecutionSetTokenEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkIndirectCommandsExecutionSetTokenEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoTypeEXT const &, VULKAN_HPP_NAMESPACE::ShaderStageFlags const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( type, shaderStages );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( IndirectCommandsExecutionSetTokenEXT const & ) const = default;
#else
bool operator==( IndirectCommandsExecutionSetTokenEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( type == rhs.type )
&& ( shaderStages == rhs.shaderStages );
#endif
}
bool operator!=( IndirectCommandsExecutionSetTokenEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoTypeEXT type = VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoTypeEXT::ePipelines;
VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStages = {};
};
// wrapper struct for struct VkIndirectCommandsIndexBufferTokenEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectCommandsIndexBufferTokenEXT.html
struct IndirectCommandsIndexBufferTokenEXT
{
using NativeType = VkIndirectCommandsIndexBufferTokenEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR IndirectCommandsIndexBufferTokenEXT(VULKAN_HPP_NAMESPACE::IndirectCommandsInputModeFlagBitsEXT mode_ = VULKAN_HPP_NAMESPACE::IndirectCommandsInputModeFlagBitsEXT::eVulkanIndexBuffer) VULKAN_HPP_NOEXCEPT
: mode{ mode_ }
{}
VULKAN_HPP_CONSTEXPR IndirectCommandsIndexBufferTokenEXT( IndirectCommandsIndexBufferTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
IndirectCommandsIndexBufferTokenEXT( VkIndirectCommandsIndexBufferTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: IndirectCommandsIndexBufferTokenEXT( *reinterpret_cast<IndirectCommandsIndexBufferTokenEXT const *>( &rhs ) )
{}
IndirectCommandsIndexBufferTokenEXT & operator=( IndirectCommandsIndexBufferTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
IndirectCommandsIndexBufferTokenEXT & operator=( VkIndirectCommandsIndexBufferTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IndirectCommandsIndexBufferTokenEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsIndexBufferTokenEXT & setMode( VULKAN_HPP_NAMESPACE::IndirectCommandsInputModeFlagBitsEXT mode_ ) VULKAN_HPP_NOEXCEPT
{
mode = mode_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkIndirectCommandsIndexBufferTokenEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkIndirectCommandsIndexBufferTokenEXT*>( this );
}
operator VkIndirectCommandsIndexBufferTokenEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkIndirectCommandsIndexBufferTokenEXT*>( this );
}
operator VkIndirectCommandsIndexBufferTokenEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkIndirectCommandsIndexBufferTokenEXT*>( this );
}
operator VkIndirectCommandsIndexBufferTokenEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkIndirectCommandsIndexBufferTokenEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::IndirectCommandsInputModeFlagBitsEXT const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( mode );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( IndirectCommandsIndexBufferTokenEXT const & ) const = default;
#else
bool operator==( IndirectCommandsIndexBufferTokenEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( mode == rhs.mode );
#endif
}
bool operator!=( IndirectCommandsIndexBufferTokenEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::IndirectCommandsInputModeFlagBitsEXT mode = VULKAN_HPP_NAMESPACE::IndirectCommandsInputModeFlagBitsEXT::eVulkanIndexBuffer;
};
// wrapper struct for struct VkPushConstantRange, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPushConstantRange.html
struct PushConstantRange
{
using NativeType = VkPushConstantRange;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PushConstantRange(VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, uint32_t offset_ = {}, uint32_t size_ = {}) VULKAN_HPP_NOEXCEPT
: stageFlags{ stageFlags_ }, offset{ offset_ }, size{ size_ }
{}
VULKAN_HPP_CONSTEXPR PushConstantRange( PushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PushConstantRange( VkPushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT
: PushConstantRange( *reinterpret_cast<PushConstantRange const *>( &rhs ) )
{}
PushConstantRange & operator=( PushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PushConstantRange & operator=( VkPushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PushConstantRange const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PushConstantRange & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT
{
stageFlags = stageFlags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PushConstantRange & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT
{
offset = offset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PushConstantRange & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT
{
size = size_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPushConstantRange const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPushConstantRange*>( this );
}
operator VkPushConstantRange &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPushConstantRange*>( this );
}
operator VkPushConstantRange const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPushConstantRange*>( this );
}
operator VkPushConstantRange *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPushConstantRange*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::ShaderStageFlags const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( stageFlags, offset, size );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PushConstantRange const & ) const = default;
#else
bool operator==( PushConstantRange const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( stageFlags == rhs.stageFlags )
&& ( offset == rhs.offset )
&& ( size == rhs.size );
#endif
}
bool operator!=( PushConstantRange const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {};
uint32_t offset = {};
uint32_t size = {};
};
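// Usage sketch (illustrative, not part of the generated header): a push-constant range covering a
// small per-draw block in the vertex stage, passed to pipeline-layout creation; assumes enhanced
// mode (VULKAN_HPP_DISABLE_ENHANCED_MODE not defined).
//   vk::PushConstantRange range{ vk::ShaderStageFlagBits::eVertex, 0, sizeof( float ) * 4 };
//   vk::PipelineLayoutCreateInfo layoutInfo{};
//   layoutInfo.setPushConstantRanges( range );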
// wrapper struct for struct VkIndirectCommandsPushConstantTokenEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectCommandsPushConstantTokenEXT.html
struct IndirectCommandsPushConstantTokenEXT
{
using NativeType = VkIndirectCommandsPushConstantTokenEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR IndirectCommandsPushConstantTokenEXT(VULKAN_HPP_NAMESPACE::PushConstantRange updateRange_ = {}) VULKAN_HPP_NOEXCEPT
: updateRange{ updateRange_ }
{}
VULKAN_HPP_CONSTEXPR IndirectCommandsPushConstantTokenEXT( IndirectCommandsPushConstantTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
IndirectCommandsPushConstantTokenEXT( VkIndirectCommandsPushConstantTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: IndirectCommandsPushConstantTokenEXT( *reinterpret_cast<IndirectCommandsPushConstantTokenEXT const *>( &rhs ) )
{}
IndirectCommandsPushConstantTokenEXT & operator=( IndirectCommandsPushConstantTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
IndirectCommandsPushConstantTokenEXT & operator=( VkIndirectCommandsPushConstantTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IndirectCommandsPushConstantTokenEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsPushConstantTokenEXT & setUpdateRange( VULKAN_HPP_NAMESPACE::PushConstantRange const & updateRange_ ) VULKAN_HPP_NOEXCEPT
{
updateRange = updateRange_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkIndirectCommandsPushConstantTokenEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkIndirectCommandsPushConstantTokenEXT*>( this );
}
operator VkIndirectCommandsPushConstantTokenEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkIndirectCommandsPushConstantTokenEXT*>( this );
}
operator VkIndirectCommandsPushConstantTokenEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkIndirectCommandsPushConstantTokenEXT*>( this );
}
operator VkIndirectCommandsPushConstantTokenEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkIndirectCommandsPushConstantTokenEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::PushConstantRange const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( updateRange );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( IndirectCommandsPushConstantTokenEXT const & ) const = default;
#else
bool operator==( IndirectCommandsPushConstantTokenEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( updateRange == rhs.updateRange );
#endif
}
bool operator!=( IndirectCommandsPushConstantTokenEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::PushConstantRange updateRange = {};
};
// wrapper struct for struct VkIndirectCommandsVertexBufferTokenEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectCommandsVertexBufferTokenEXT.html
struct IndirectCommandsVertexBufferTokenEXT
{
using NativeType = VkIndirectCommandsVertexBufferTokenEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR IndirectCommandsVertexBufferTokenEXT(uint32_t vertexBindingUnit_ = {}) VULKAN_HPP_NOEXCEPT
: vertexBindingUnit{ vertexBindingUnit_ }
{}
VULKAN_HPP_CONSTEXPR IndirectCommandsVertexBufferTokenEXT( IndirectCommandsVertexBufferTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
IndirectCommandsVertexBufferTokenEXT( VkIndirectCommandsVertexBufferTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: IndirectCommandsVertexBufferTokenEXT( *reinterpret_cast<IndirectCommandsVertexBufferTokenEXT const *>( &rhs ) )
{}
IndirectCommandsVertexBufferTokenEXT & operator=( IndirectCommandsVertexBufferTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
IndirectCommandsVertexBufferTokenEXT & operator=( VkIndirectCommandsVertexBufferTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IndirectCommandsVertexBufferTokenEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsVertexBufferTokenEXT & setVertexBindingUnit( uint32_t vertexBindingUnit_ ) VULKAN_HPP_NOEXCEPT
{
vertexBindingUnit = vertexBindingUnit_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkIndirectCommandsVertexBufferTokenEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkIndirectCommandsVertexBufferTokenEXT*>( this );
}
operator VkIndirectCommandsVertexBufferTokenEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkIndirectCommandsVertexBufferTokenEXT*>( this );
}
operator VkIndirectCommandsVertexBufferTokenEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkIndirectCommandsVertexBufferTokenEXT*>( this );
}
operator VkIndirectCommandsVertexBufferTokenEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkIndirectCommandsVertexBufferTokenEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( vertexBindingUnit );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( IndirectCommandsVertexBufferTokenEXT const & ) const = default;
#else
bool operator==( IndirectCommandsVertexBufferTokenEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( vertexBindingUnit == rhs.vertexBindingUnit );
#endif
}
bool operator!=( IndirectCommandsVertexBufferTokenEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
uint32_t vertexBindingUnit = {};
};
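// wrapper union for union VkIndirectCommandsTokenDataEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectCommandsTokenDataEXT.html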
union IndirectCommandsTokenDataEXT
{
using NativeType = VkIndirectCommandsTokenDataEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsTokenDataEXT( const VULKAN_HPP_NAMESPACE::IndirectCommandsPushConstantTokenEXT * pPushConstant_ = {} )
: pPushConstant( pPushConstant_ )
{}
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsTokenDataEXT( const VULKAN_HPP_NAMESPACE::IndirectCommandsVertexBufferTokenEXT * pVertexBuffer_ )
: pVertexBuffer( pVertexBuffer_ )
{}
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsTokenDataEXT( const VULKAN_HPP_NAMESPACE::IndirectCommandsIndexBufferTokenEXT * pIndexBuffer_ )
: pIndexBuffer( pIndexBuffer_ )
{}
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsTokenDataEXT( const VULKAN_HPP_NAMESPACE::IndirectCommandsExecutionSetTokenEXT * pExecutionSet_ )
: pExecutionSet( pExecutionSet_ )
{}
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS )
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsTokenDataEXT & setPPushConstant( const VULKAN_HPP_NAMESPACE::IndirectCommandsPushConstantTokenEXT * pPushConstant_ ) VULKAN_HPP_NOEXCEPT
{
pPushConstant = pPushConstant_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsTokenDataEXT & setPVertexBuffer( const VULKAN_HPP_NAMESPACE::IndirectCommandsVertexBufferTokenEXT * pVertexBuffer_ ) VULKAN_HPP_NOEXCEPT
{
pVertexBuffer = pVertexBuffer_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsTokenDataEXT & setPIndexBuffer( const VULKAN_HPP_NAMESPACE::IndirectCommandsIndexBufferTokenEXT * pIndexBuffer_ ) VULKAN_HPP_NOEXCEPT
{
pIndexBuffer = pIndexBuffer_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsTokenDataEXT & setPExecutionSet( const VULKAN_HPP_NAMESPACE::IndirectCommandsExecutionSetTokenEXT * pExecutionSet_ ) VULKAN_HPP_NOEXCEPT
{
pExecutionSet = pExecutionSet_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkIndirectCommandsTokenDataEXT const &() const
{
return *reinterpret_cast<const VkIndirectCommandsTokenDataEXT*>( this );
}
operator VkIndirectCommandsTokenDataEXT &()
{
return *reinterpret_cast<VkIndirectCommandsTokenDataEXT*>( this );
}
#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
const VULKAN_HPP_NAMESPACE::IndirectCommandsPushConstantTokenEXT * pPushConstant;
const VULKAN_HPP_NAMESPACE::IndirectCommandsVertexBufferTokenEXT * pVertexBuffer;
const VULKAN_HPP_NAMESPACE::IndirectCommandsIndexBufferTokenEXT * pIndexBuffer;
const VULKAN_HPP_NAMESPACE::IndirectCommandsExecutionSetTokenEXT * pExecutionSet;
#else
const VkIndirectCommandsPushConstantTokenEXT * pPushConstant;
const VkIndirectCommandsVertexBufferTokenEXT * pVertexBuffer;
const VkIndirectCommandsIndexBufferTokenEXT * pIndexBuffer;
const VkIndirectCommandsExecutionSetTokenEXT * pExecutionSet;
#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
};
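// Usage sketch (illustrative, not part of the generated header): exactly one pointer member of
// the union is active at a time, selected by the IndirectCommandsTokenTypeEXT of the enclosing
// layout token; assumes default configuration (union constructors not disabled).
//   vk::IndirectCommandsPushConstantTokenEXT pushConstantToken{ vk::PushConstantRange{ vk::ShaderStageFlagBits::eVertex, 0, 16 } };
//   vk::IndirectCommandsTokenDataEXT tokenData{ &pushConstantToken };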
// wrapper struct for struct VkIndirectCommandsLayoutTokenEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectCommandsLayoutTokenEXT.html
struct IndirectCommandsLayoutTokenEXT
{
using NativeType = VkIndirectCommandsLayoutTokenEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectCommandsLayoutTokenEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenEXT(VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeEXT type_ = VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeEXT::eExecutionSet, VULKAN_HPP_NAMESPACE::IndirectCommandsTokenDataEXT data_ = {}, uint32_t offset_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, type{ type_ }, data{ data_ }, offset{ offset_ }
{}
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenEXT( IndirectCommandsLayoutTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
IndirectCommandsLayoutTokenEXT( VkIndirectCommandsLayoutTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: IndirectCommandsLayoutTokenEXT( *reinterpret_cast<IndirectCommandsLayoutTokenEXT const *>( &rhs ) )
{}
IndirectCommandsLayoutTokenEXT & operator=( IndirectCommandsLayoutTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
IndirectCommandsLayoutTokenEXT & operator=( VkIndirectCommandsLayoutTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenEXT & setType( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeEXT type_ ) VULKAN_HPP_NOEXCEPT
{
type = type_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenEXT & setData( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenDataEXT const & data_ ) VULKAN_HPP_NOEXCEPT
{
data = data_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenEXT & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT
{
offset = offset_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkIndirectCommandsLayoutTokenEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkIndirectCommandsLayoutTokenEXT*>( this );
}
operator VkIndirectCommandsLayoutTokenEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkIndirectCommandsLayoutTokenEXT*>( this );
}
operator VkIndirectCommandsLayoutTokenEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkIndirectCommandsLayoutTokenEXT*>( this );
}
operator VkIndirectCommandsLayoutTokenEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkIndirectCommandsLayoutTokenEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeEXT const &, VULKAN_HPP_NAMESPACE::IndirectCommandsTokenDataEXT const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, type, data, offset );
}
#endif
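// note: unlike most wrapper structs, no comparison operators are generated here, since the
// union member `data` cannot be meaningfully compared without knowing the active token type.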
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectCommandsLayoutTokenEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeEXT type = VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeEXT::eExecutionSet;
VULKAN_HPP_NAMESPACE::IndirectCommandsTokenDataEXT data = {};
uint32_t offset = {};
};
template <>
struct CppType<StructureType, StructureType::eIndirectCommandsLayoutTokenEXT>
{
using Type = IndirectCommandsLayoutTokenEXT;
};
// wrapper struct for struct VkIndirectCommandsLayoutCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectCommandsLayoutCreateInfoEXT.html
struct IndirectCommandsLayoutCreateInfoEXT
{
using NativeType = VkIndirectCommandsLayoutCreateInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectCommandsLayoutCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoEXT(VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsEXT flags_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStages_ = {}, uint32_t indirectStride_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {}, uint32_t tokenCount_ = {}, const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenEXT * pTokens_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, shaderStages{ shaderStages_ }, indirectStride{ indirectStride_ }, pipelineLayout{ pipelineLayout_ }, tokenCount{ tokenCount_ }, pTokens{ pTokens_ }
{}
VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoEXT( IndirectCommandsLayoutCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
IndirectCommandsLayoutCreateInfoEXT( VkIndirectCommandsLayoutCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: IndirectCommandsLayoutCreateInfoEXT( *reinterpret_cast<IndirectCommandsLayoutCreateInfoEXT const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
IndirectCommandsLayoutCreateInfoEXT( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsEXT flags_, VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStages_, uint32_t indirectStride_, VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenEXT> const & tokens_, const void * pNext_ = nullptr )
: pNext( pNext_ ), flags( flags_ ), shaderStages( shaderStages_ ), indirectStride( indirectStride_ ), pipelineLayout( pipelineLayout_ ), tokenCount( static_cast<uint32_t>( tokens_.size() ) ), pTokens( tokens_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
IndirectCommandsLayoutCreateInfoEXT & operator=( IndirectCommandsLayoutCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
IndirectCommandsLayoutCreateInfoEXT & operator=( VkIndirectCommandsLayoutCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoEXT & setShaderStages( VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStages_ ) VULKAN_HPP_NOEXCEPT
{
shaderStages = shaderStages_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoEXT & setIndirectStride( uint32_t indirectStride_ ) VULKAN_HPP_NOEXCEPT
{
indirectStride = indirectStride_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoEXT & setPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ ) VULKAN_HPP_NOEXCEPT
{
pipelineLayout = pipelineLayout_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoEXT & setTokenCount( uint32_t tokenCount_ ) VULKAN_HPP_NOEXCEPT
{
tokenCount = tokenCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoEXT & setPTokens( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenEXT * pTokens_ ) VULKAN_HPP_NOEXCEPT
{
pTokens = pTokens_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
IndirectCommandsLayoutCreateInfoEXT & setTokens( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenEXT> const & tokens_ ) VULKAN_HPP_NOEXCEPT
{
tokenCount = static_cast<uint32_t>( tokens_.size() );
pTokens = tokens_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkIndirectCommandsLayoutCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoEXT*>( this );
}
operator VkIndirectCommandsLayoutCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkIndirectCommandsLayoutCreateInfoEXT*>( this );
}
operator VkIndirectCommandsLayoutCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoEXT*>( this );
}
operator VkIndirectCommandsLayoutCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkIndirectCommandsLayoutCreateInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsEXT const &, VULKAN_HPP_NAMESPACE::ShaderStageFlags const &, uint32_t const &, VULKAN_HPP_NAMESPACE::PipelineLayout const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenEXT * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, shaderStages, indirectStride, pipelineLayout, tokenCount, pTokens );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( IndirectCommandsLayoutCreateInfoEXT const & ) const = default;
#else
bool operator==( IndirectCommandsLayoutCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( shaderStages == rhs.shaderStages )
&& ( indirectStride == rhs.indirectStride )
&& ( pipelineLayout == rhs.pipelineLayout )
&& ( tokenCount == rhs.tokenCount )
&& ( pTokens == rhs.pTokens );
#endif
}
bool operator!=( IndirectCommandsLayoutCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectCommandsLayoutCreateInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsEXT flags = {};
VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStages = {};
uint32_t indirectStride = {};
VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout = {};
uint32_t tokenCount = {};
const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenEXT * pTokens = {};
};
template <>
struct CppType<StructureType, StructureType::eIndirectCommandsLayoutCreateInfoEXT>
{
using Type = IndirectCommandsLayoutCreateInfoEXT;
};
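// Usage sketch (illustrative, not part of the generated header): building a layout for
// VK_EXT_device_generated_commands from an application-filled token array; assumes enhanced mode,
// enabled exceptions, and hypothetical `device` and `pipelineLayout` handles.
//   std::array<vk::IndirectCommandsLayoutTokenEXT, 2> tokens = { /* filled by the application */ };
//   vk::IndirectCommandsLayoutCreateInfoEXT createInfo( {}, vk::ShaderStageFlagBits::eVertex, sizeof( uint32_t ) * 8, pipelineLayout, tokens );
//   vk::IndirectCommandsLayoutEXT layout = device.createIndirectCommandsLayoutEXT( createInfo );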
// wrapper struct for struct VkIndirectCommandsLayoutTokenNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectCommandsLayoutTokenNV.html
struct IndirectCommandsLayoutTokenNV
{
using NativeType = VkIndirectCommandsLayoutTokenNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectCommandsLayoutTokenNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutTokenNV(VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType_ = VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV::eShaderGroup, uint32_t stream_ = {}, uint32_t offset_ = {}, uint32_t vertexBindingUnit_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags_ = {}, uint32_t pushconstantOffset_ = {}, uint32_t pushconstantSize_ = {}, VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags_ = {}, uint32_t indexTypeCount_ = {}, const VULKAN_HPP_NAMESPACE::IndexType * pIndexTypes_ = {}, const uint32_t * pIndexTypeValues_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, tokenType{ tokenType_ }, stream{ stream_ }, offset{ offset_ }, vertexBindingUnit{ vertexBindingUnit_ }, vertexDynamicStride{ vertexDynamicStride_ }, pushconstantPipelineLayout{ pushconstantPipelineLayout_ }, pushconstantShaderStageFlags{ pushconstantShaderStageFlags_ }, pushconstantOffset{ pushconstantOffset_ }, pushconstantSize{ pushconstantSize_ }, indirectStateFlags{ indirectStateFlags_ }, indexTypeCount{ indexTypeCount_ }, pIndexTypes{ pIndexTypes_ }, pIndexTypeValues{ pIndexTypeValues_ }
{}
VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutTokenNV( IndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
IndirectCommandsLayoutTokenNV( VkIndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT
: IndirectCommandsLayoutTokenNV( *reinterpret_cast<IndirectCommandsLayoutTokenNV const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
IndirectCommandsLayoutTokenNV( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType_, uint32_t stream_, uint32_t offset_, uint32_t vertexBindingUnit_, VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride_, VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout_, VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags_, uint32_t pushconstantOffset_, uint32_t pushconstantSize_, VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndexType> const & indexTypes_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & indexTypeValues_ = {}, const void * pNext_ = nullptr )
: pNext( pNext_ ), tokenType( tokenType_ ), stream( stream_ ), offset( offset_ ), vertexBindingUnit( vertexBindingUnit_ ), vertexDynamicStride( vertexDynamicStride_ ), pushconstantPipelineLayout( pushconstantPipelineLayout_ ), pushconstantShaderStageFlags( pushconstantShaderStageFlags_ ), pushconstantOffset( pushconstantOffset_ ), pushconstantSize( pushconstantSize_ ), indirectStateFlags( indirectStateFlags_ ), indexTypeCount( static_cast<uint32_t>( indexTypes_.size() ) ), pIndexTypes( indexTypes_.data() ), pIndexTypeValues( indexTypeValues_.data() )
{
#ifdef VULKAN_HPP_NO_EXCEPTIONS
VULKAN_HPP_ASSERT( indexTypes_.size() == indexTypeValues_.size() );
#else
if ( indexTypes_.size() != indexTypeValues_.size() )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING"::IndirectCommandsLayoutTokenNV::IndirectCommandsLayoutTokenNV: indexTypes_.size() != indexTypeValues_.size()" );
}
#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
IndirectCommandsLayoutTokenNV & operator=( IndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
IndirectCommandsLayoutTokenNV & operator=( VkIndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setTokenType( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType_ ) VULKAN_HPP_NOEXCEPT
{
tokenType = tokenType_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setStream( uint32_t stream_ ) VULKAN_HPP_NOEXCEPT
{
stream = stream_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT
{
offset = offset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setVertexBindingUnit( uint32_t vertexBindingUnit_ ) VULKAN_HPP_NOEXCEPT
{
vertexBindingUnit = vertexBindingUnit_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setVertexDynamicStride( VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride_ ) VULKAN_HPP_NOEXCEPT
{
vertexDynamicStride = vertexDynamicStride_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPushconstantPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout_ ) VULKAN_HPP_NOEXCEPT
{
pushconstantPipelineLayout = pushconstantPipelineLayout_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPushconstantShaderStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags_ ) VULKAN_HPP_NOEXCEPT
{
pushconstantShaderStageFlags = pushconstantShaderStageFlags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPushconstantOffset( uint32_t pushconstantOffset_ ) VULKAN_HPP_NOEXCEPT
{
pushconstantOffset = pushconstantOffset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPushconstantSize( uint32_t pushconstantSize_ ) VULKAN_HPP_NOEXCEPT
{
pushconstantSize = pushconstantSize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setIndirectStateFlags( VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags_ ) VULKAN_HPP_NOEXCEPT
{
indirectStateFlags = indirectStateFlags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setIndexTypeCount( uint32_t indexTypeCount_ ) VULKAN_HPP_NOEXCEPT
{
indexTypeCount = indexTypeCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPIndexTypes( const VULKAN_HPP_NAMESPACE::IndexType * pIndexTypes_ ) VULKAN_HPP_NOEXCEPT
{
pIndexTypes = pIndexTypes_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
IndirectCommandsLayoutTokenNV & setIndexTypes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndexType> const & indexTypes_ ) VULKAN_HPP_NOEXCEPT
{
indexTypeCount = static_cast<uint32_t>( indexTypes_.size() );
pIndexTypes = indexTypes_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPIndexTypeValues( const uint32_t * pIndexTypeValues_ ) VULKAN_HPP_NOEXCEPT
{
pIndexTypeValues = pIndexTypeValues_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
IndirectCommandsLayoutTokenNV & setIndexTypeValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & indexTypeValues_ ) VULKAN_HPP_NOEXCEPT
{
indexTypeCount = static_cast<uint32_t>( indexTypeValues_.size() );
pIndexTypeValues = indexTypeValues_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkIndirectCommandsLayoutTokenNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkIndirectCommandsLayoutTokenNV*>( this );
}
operator VkIndirectCommandsLayoutTokenNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkIndirectCommandsLayoutTokenNV*>( this );
}
operator VkIndirectCommandsLayoutTokenNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkIndirectCommandsLayoutTokenNV*>( this );
}
operator VkIndirectCommandsLayoutTokenNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkIndirectCommandsLayoutTokenNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::PipelineLayout const &, VULKAN_HPP_NAMESPACE::ShaderStageFlags const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::IndexType * const &, const uint32_t * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, tokenType, stream, offset, vertexBindingUnit, vertexDynamicStride, pushconstantPipelineLayout, pushconstantShaderStageFlags, pushconstantOffset, pushconstantSize, indirectStateFlags, indexTypeCount, pIndexTypes, pIndexTypeValues );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( IndirectCommandsLayoutTokenNV const & ) const = default;
#else
bool operator==( IndirectCommandsLayoutTokenNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( tokenType == rhs.tokenType )
&& ( stream == rhs.stream )
&& ( offset == rhs.offset )
&& ( vertexBindingUnit == rhs.vertexBindingUnit )
&& ( vertexDynamicStride == rhs.vertexDynamicStride )
&& ( pushconstantPipelineLayout == rhs.pushconstantPipelineLayout )
&& ( pushconstantShaderStageFlags == rhs.pushconstantShaderStageFlags )
&& ( pushconstantOffset == rhs.pushconstantOffset )
&& ( pushconstantSize == rhs.pushconstantSize )
&& ( indirectStateFlags == rhs.indirectStateFlags )
&& ( indexTypeCount == rhs.indexTypeCount )
&& ( pIndexTypes == rhs.pIndexTypes )
&& ( pIndexTypeValues == rhs.pIndexTypeValues );
#endif
}
bool operator!=( IndirectCommandsLayoutTokenNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectCommandsLayoutTokenNV;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType = VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV::eShaderGroup;
uint32_t stream = {};
uint32_t offset = {};
uint32_t vertexBindingUnit = {};
VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride = {};
VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout = {};
VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags = {};
uint32_t pushconstantOffset = {};
uint32_t pushconstantSize = {};
VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags = {};
uint32_t indexTypeCount = {};
const VULKAN_HPP_NAMESPACE::IndexType * pIndexTypes = {};
const uint32_t * pIndexTypeValues = {};
};
template <>
struct CppType<StructureType, StructureType::eIndirectCommandsLayoutTokenNV>
{
using Type = IndirectCommandsLayoutTokenNV;
};
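  // Usage sketch (illustrative only, not part of the generated API; assumes the default `vk` namespace alias
  // and enhanced mode): an index-buffer token whose pIndexTypes / pIndexTypeValues arrays remap raw uint32_t
  // values found in the command stream to Vulkan index types. The enhanced-mode constructor above enforces
  // that both arrays have the same length.
  //
  //   std::array<vk::IndexType, 2> indexTypes  = { vk::IndexType::eUint16, vk::IndexType::eUint32 };
  //   std::array<uint32_t, 2>      indexValues = { 0, 1 };
  //   vk::IndirectCommandsLayoutTokenNV token{};
  //   token.setTokenType( vk::IndirectCommandsTokenTypeNV::eIndexBuffer )
  //        .setStream( 0 )
  //        .setOffset( 0 )
  //        .setIndexTypes( indexTypes )                 // also updates indexTypeCount
  //        .setPIndexTypeValues( indexValues.data() );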
// wrapper struct for struct VkIndirectCommandsLayoutCreateInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectCommandsLayoutCreateInfoNV.html
struct IndirectCommandsLayoutCreateInfoNV
{
using NativeType = VkIndirectCommandsLayoutCreateInfoNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectCommandsLayoutCreateInfoNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoNV(VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_ = {}, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, uint32_t tokenCount_ = {}, const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV * pTokens_ = {}, uint32_t streamCount_ = {}, const uint32_t * pStreamStrides_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, pipelineBindPoint{ pipelineBindPoint_ }, tokenCount{ tokenCount_ }, pTokens{ pTokens_ }, streamCount{ streamCount_ }, pStreamStrides{ pStreamStrides_ }
{}
VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoNV( IndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
IndirectCommandsLayoutCreateInfoNV( VkIndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: IndirectCommandsLayoutCreateInfoNV( *reinterpret_cast<IndirectCommandsLayoutCreateInfoNV const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
IndirectCommandsLayoutCreateInfoNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV> const & tokens_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & streamStrides_ = {}, const void * pNext_ = nullptr )
: pNext( pNext_ ), flags( flags_ ), pipelineBindPoint( pipelineBindPoint_ ), tokenCount( static_cast<uint32_t>( tokens_.size() ) ), pTokens( tokens_.data() ), streamCount( static_cast<uint32_t>( streamStrides_.size() ) ), pStreamStrides( streamStrides_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
IndirectCommandsLayoutCreateInfoNV & operator=( IndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
IndirectCommandsLayoutCreateInfoNV & operator=( VkIndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
{
pipelineBindPoint = pipelineBindPoint_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setTokenCount( uint32_t tokenCount_ ) VULKAN_HPP_NOEXCEPT
{
tokenCount = tokenCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setPTokens( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV * pTokens_ ) VULKAN_HPP_NOEXCEPT
{
pTokens = pTokens_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
IndirectCommandsLayoutCreateInfoNV & setTokens( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV> const & tokens_ ) VULKAN_HPP_NOEXCEPT
{
tokenCount = static_cast<uint32_t>( tokens_.size() );
pTokens = tokens_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setStreamCount( uint32_t streamCount_ ) VULKAN_HPP_NOEXCEPT
{
streamCount = streamCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setPStreamStrides( const uint32_t * pStreamStrides_ ) VULKAN_HPP_NOEXCEPT
{
pStreamStrides = pStreamStrides_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
IndirectCommandsLayoutCreateInfoNV & setStreamStrides( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & streamStrides_ ) VULKAN_HPP_NOEXCEPT
{
streamCount = static_cast<uint32_t>( streamStrides_.size() );
pStreamStrides = streamStrides_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkIndirectCommandsLayoutCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV*>( this );
}
operator VkIndirectCommandsLayoutCreateInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkIndirectCommandsLayoutCreateInfoNV*>( this );
}
operator VkIndirectCommandsLayoutCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV*>( this );
}
operator VkIndirectCommandsLayoutCreateInfoNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkIndirectCommandsLayoutCreateInfoNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV const &, VULKAN_HPP_NAMESPACE::PipelineBindPoint const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV * const &, uint32_t const &, const uint32_t * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, pipelineBindPoint, tokenCount, pTokens, streamCount, pStreamStrides );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( IndirectCommandsLayoutCreateInfoNV const & ) const = default;
#else
bool operator==( IndirectCommandsLayoutCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( pipelineBindPoint == rhs.pipelineBindPoint )
&& ( tokenCount == rhs.tokenCount )
&& ( pTokens == rhs.pTokens )
&& ( streamCount == rhs.streamCount )
&& ( pStreamStrides == rhs.pStreamStrides );
#endif
}
bool operator!=( IndirectCommandsLayoutCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectCommandsLayoutCreateInfoNV;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags = {};
VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
uint32_t tokenCount = {};
const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV * pTokens = {};
uint32_t streamCount = {};
const uint32_t * pStreamStrides = {};
};
template <>
struct CppType<StructureType, StructureType::eIndirectCommandsLayoutCreateInfoNV>
{
using Type = IndirectCommandsLayoutCreateInfoNV;
};
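  // Usage sketch (illustrative only, not part of the generated API): the enhanced-mode constructor derives
  // tokenCount and streamCount from the sizes of the ArrayProxy arguments. `token` is the object from the
  // sketch above; `MyStreamRecordNV` is a hypothetical per-sequence struct for stream 0.
  //
  //   std::array<vk::IndirectCommandsLayoutTokenNV, 1> tokens        = { token };
  //   std::array<uint32_t, 1>                          streamStrides = { sizeof( MyStreamRecordNV ) };
  //   vk::IndirectCommandsLayoutCreateInfoNV layoutCreateInfo( {} /*flags*/, vk::PipelineBindPoint::eGraphics, tokens, streamStrides );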
// wrapper struct for struct VkIndirectExecutionSetPipelineInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectExecutionSetPipelineInfoEXT.html
struct IndirectExecutionSetPipelineInfoEXT
{
using NativeType = VkIndirectExecutionSetPipelineInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectExecutionSetPipelineInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR IndirectExecutionSetPipelineInfoEXT(VULKAN_HPP_NAMESPACE::Pipeline initialPipeline_ = {}, uint32_t maxPipelineCount_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, initialPipeline{ initialPipeline_ }, maxPipelineCount{ maxPipelineCount_ }
{}
VULKAN_HPP_CONSTEXPR IndirectExecutionSetPipelineInfoEXT( IndirectExecutionSetPipelineInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
IndirectExecutionSetPipelineInfoEXT( VkIndirectExecutionSetPipelineInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: IndirectExecutionSetPipelineInfoEXT( *reinterpret_cast<IndirectExecutionSetPipelineInfoEXT const *>( &rhs ) )
{}
IndirectExecutionSetPipelineInfoEXT & operator=( IndirectExecutionSetPipelineInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
IndirectExecutionSetPipelineInfoEXT & operator=( VkIndirectExecutionSetPipelineInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IndirectExecutionSetPipelineInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetPipelineInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetPipelineInfoEXT & setInitialPipeline( VULKAN_HPP_NAMESPACE::Pipeline initialPipeline_ ) VULKAN_HPP_NOEXCEPT
{
initialPipeline = initialPipeline_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetPipelineInfoEXT & setMaxPipelineCount( uint32_t maxPipelineCount_ ) VULKAN_HPP_NOEXCEPT
{
maxPipelineCount = maxPipelineCount_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkIndirectExecutionSetPipelineInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkIndirectExecutionSetPipelineInfoEXT*>( this );
}
operator VkIndirectExecutionSetPipelineInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkIndirectExecutionSetPipelineInfoEXT*>( this );
}
operator VkIndirectExecutionSetPipelineInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkIndirectExecutionSetPipelineInfoEXT*>( this );
}
operator VkIndirectExecutionSetPipelineInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkIndirectExecutionSetPipelineInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Pipeline const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, initialPipeline, maxPipelineCount );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( IndirectExecutionSetPipelineInfoEXT const & ) const = default;
#else
bool operator==( IndirectExecutionSetPipelineInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( initialPipeline == rhs.initialPipeline )
&& ( maxPipelineCount == rhs.maxPipelineCount );
#endif
}
bool operator!=( IndirectExecutionSetPipelineInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectExecutionSetPipelineInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Pipeline initialPipeline = {};
uint32_t maxPipelineCount = {};
};
template <>
struct CppType<StructureType, StructureType::eIndirectExecutionSetPipelineInfoEXT>
{
using Type = IndirectExecutionSetPipelineInfoEXT;
};
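  // Usage sketch (illustrative only, not part of the generated API): `initialPipeline` is a hypothetical
  // pipeline created elsewhere with device-generated-commands usage; maxPipelineCount bounds how many
  // pipelines the indirect execution set can hold.
  //
  //   vk::IndirectExecutionSetPipelineInfoEXT pipelineInfo( initialPipeline, 8 /*maxPipelineCount*/ );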
// wrapper struct for struct VkIndirectExecutionSetShaderLayoutInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectExecutionSetShaderLayoutInfoEXT.html
struct IndirectExecutionSetShaderLayoutInfoEXT
{
using NativeType = VkIndirectExecutionSetShaderLayoutInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectExecutionSetShaderLayoutInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR IndirectExecutionSetShaderLayoutInfoEXT(uint32_t setLayoutCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, setLayoutCount{ setLayoutCount_ }, pSetLayouts{ pSetLayouts_ }
{}
VULKAN_HPP_CONSTEXPR IndirectExecutionSetShaderLayoutInfoEXT( IndirectExecutionSetShaderLayoutInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
IndirectExecutionSetShaderLayoutInfoEXT( VkIndirectExecutionSetShaderLayoutInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: IndirectExecutionSetShaderLayoutInfoEXT( *reinterpret_cast<IndirectExecutionSetShaderLayoutInfoEXT const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
IndirectExecutionSetShaderLayoutInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayout> const & setLayouts_, const void * pNext_ = nullptr )
: pNext( pNext_ ), setLayoutCount( static_cast<uint32_t>( setLayouts_.size() ) ), pSetLayouts( setLayouts_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
IndirectExecutionSetShaderLayoutInfoEXT & operator=( IndirectExecutionSetShaderLayoutInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
IndirectExecutionSetShaderLayoutInfoEXT & operator=( VkIndirectExecutionSetShaderLayoutInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderLayoutInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderLayoutInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderLayoutInfoEXT & setSetLayoutCount( uint32_t setLayoutCount_ ) VULKAN_HPP_NOEXCEPT
{
setLayoutCount = setLayoutCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderLayoutInfoEXT & setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ ) VULKAN_HPP_NOEXCEPT
{
pSetLayouts = pSetLayouts_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
IndirectExecutionSetShaderLayoutInfoEXT & setSetLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayout> const & setLayouts_ ) VULKAN_HPP_NOEXCEPT
{
setLayoutCount = static_cast<uint32_t>( setLayouts_.size() );
pSetLayouts = setLayouts_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkIndirectExecutionSetShaderLayoutInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkIndirectExecutionSetShaderLayoutInfoEXT*>( this );
}
operator VkIndirectExecutionSetShaderLayoutInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkIndirectExecutionSetShaderLayoutInfoEXT*>( this );
}
operator VkIndirectExecutionSetShaderLayoutInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkIndirectExecutionSetShaderLayoutInfoEXT*>( this );
}
operator VkIndirectExecutionSetShaderLayoutInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkIndirectExecutionSetShaderLayoutInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, setLayoutCount, pSetLayouts );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( IndirectExecutionSetShaderLayoutInfoEXT const & ) const = default;
#else
bool operator==( IndirectExecutionSetShaderLayoutInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( setLayoutCount == rhs.setLayoutCount )
&& ( pSetLayouts == rhs.pSetLayouts );
#endif
}
bool operator!=( IndirectExecutionSetShaderLayoutInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectExecutionSetShaderLayoutInfoEXT;
const void * pNext = {};
uint32_t setLayoutCount = {};
const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts = {};
};
template <>
struct CppType<StructureType, StructureType::eIndirectExecutionSetShaderLayoutInfoEXT>
{
using Type = IndirectExecutionSetShaderLayoutInfoEXT;
};
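  // Usage sketch (illustrative only, not part of the generated API): the enhanced-mode constructor derives
  // setLayoutCount from the ArrayProxy. `layout0` and `layout1` are hypothetical descriptor set layouts
  // created elsewhere.
  //
  //   std::array<vk::DescriptorSetLayout, 2> setLayouts = { layout0, layout1 };
  //   vk::IndirectExecutionSetShaderLayoutInfoEXT layoutInfo( setLayouts );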
// wrapper struct for struct VkIndirectExecutionSetShaderInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectExecutionSetShaderInfoEXT.html
struct IndirectExecutionSetShaderInfoEXT
{
using NativeType = VkIndirectExecutionSetShaderInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectExecutionSetShaderInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR IndirectExecutionSetShaderInfoEXT(uint32_t shaderCount_ = {}, const VULKAN_HPP_NAMESPACE::ShaderEXT * pInitialShaders_ = {}, const VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderLayoutInfoEXT * pSetLayoutInfos_ = {}, uint32_t maxShaderCount_ = {}, uint32_t pushConstantRangeCount_ = {}, const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, shaderCount{ shaderCount_ }, pInitialShaders{ pInitialShaders_ }, pSetLayoutInfos{ pSetLayoutInfos_ }, maxShaderCount{ maxShaderCount_ }, pushConstantRangeCount{ pushConstantRangeCount_ }, pPushConstantRanges{ pPushConstantRanges_ }
{}
VULKAN_HPP_CONSTEXPR IndirectExecutionSetShaderInfoEXT( IndirectExecutionSetShaderInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
IndirectExecutionSetShaderInfoEXT( VkIndirectExecutionSetShaderInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: IndirectExecutionSetShaderInfoEXT( *reinterpret_cast<IndirectExecutionSetShaderInfoEXT const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
IndirectExecutionSetShaderInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ShaderEXT> const & initialShaders_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderLayoutInfoEXT> const & setLayoutInfos_ = {}, uint32_t maxShaderCount_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PushConstantRange> const & pushConstantRanges_ = {}, const void * pNext_ = nullptr )
: pNext( pNext_ ), shaderCount( static_cast<uint32_t>( initialShaders_.size() ) ), pInitialShaders( initialShaders_.data() ), pSetLayoutInfos( setLayoutInfos_.data() ), maxShaderCount( maxShaderCount_ ), pushConstantRangeCount( static_cast<uint32_t>( pushConstantRanges_.size() ) ), pPushConstantRanges( pushConstantRanges_.data() )
{
#ifdef VULKAN_HPP_NO_EXCEPTIONS
VULKAN_HPP_ASSERT( setLayoutInfos_.empty() || ( initialShaders_.size() == setLayoutInfos_.size() ) );
#else
if ( !setLayoutInfos_.empty() && ( initialShaders_.size() != setLayoutInfos_.size() ) )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING"::IndirectExecutionSetShaderInfoEXT::IndirectExecutionSetShaderInfoEXT: !setLayoutInfos_.empty() && ( initialShaders_.size() != setLayoutInfos_.size() )" );
}
#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
IndirectExecutionSetShaderInfoEXT & operator=( IndirectExecutionSetShaderInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
IndirectExecutionSetShaderInfoEXT & operator=( VkIndirectExecutionSetShaderInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT & setShaderCount( uint32_t shaderCount_ ) VULKAN_HPP_NOEXCEPT
{
shaderCount = shaderCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT & setPInitialShaders( const VULKAN_HPP_NAMESPACE::ShaderEXT * pInitialShaders_ ) VULKAN_HPP_NOEXCEPT
{
pInitialShaders = pInitialShaders_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
IndirectExecutionSetShaderInfoEXT & setInitialShaders( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ShaderEXT> const & initialShaders_ ) VULKAN_HPP_NOEXCEPT
{
shaderCount = static_cast<uint32_t>( initialShaders_.size() );
pInitialShaders = initialShaders_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT & setPSetLayoutInfos( const VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderLayoutInfoEXT * pSetLayoutInfos_ ) VULKAN_HPP_NOEXCEPT
{
pSetLayoutInfos = pSetLayoutInfos_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
IndirectExecutionSetShaderInfoEXT & setSetLayoutInfos( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderLayoutInfoEXT> const & setLayoutInfos_ ) VULKAN_HPP_NOEXCEPT
{
shaderCount = static_cast<uint32_t>( setLayoutInfos_.size() );
pSetLayoutInfos = setLayoutInfos_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT & setMaxShaderCount( uint32_t maxShaderCount_ ) VULKAN_HPP_NOEXCEPT
{
maxShaderCount = maxShaderCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT & setPushConstantRangeCount( uint32_t pushConstantRangeCount_ ) VULKAN_HPP_NOEXCEPT
{
pushConstantRangeCount = pushConstantRangeCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT & setPPushConstantRanges( const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges_ ) VULKAN_HPP_NOEXCEPT
{
pPushConstantRanges = pPushConstantRanges_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
IndirectExecutionSetShaderInfoEXT & setPushConstantRanges( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PushConstantRange> const & pushConstantRanges_ ) VULKAN_HPP_NOEXCEPT
{
pushConstantRangeCount = static_cast<uint32_t>( pushConstantRanges_.size() );
pPushConstantRanges = pushConstantRanges_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkIndirectExecutionSetShaderInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkIndirectExecutionSetShaderInfoEXT*>( this );
}
operator VkIndirectExecutionSetShaderInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkIndirectExecutionSetShaderInfoEXT*>( this );
}
operator VkIndirectExecutionSetShaderInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkIndirectExecutionSetShaderInfoEXT*>( this );
}
operator VkIndirectExecutionSetShaderInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkIndirectExecutionSetShaderInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::ShaderEXT * const &, const VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderLayoutInfoEXT * const &, uint32_t const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::PushConstantRange * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, shaderCount, pInitialShaders, pSetLayoutInfos, maxShaderCount, pushConstantRangeCount, pPushConstantRanges );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( IndirectExecutionSetShaderInfoEXT const & ) const = default;
#else
bool operator==( IndirectExecutionSetShaderInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( shaderCount == rhs.shaderCount )
&& ( pInitialShaders == rhs.pInitialShaders )
&& ( pSetLayoutInfos == rhs.pSetLayoutInfos )
&& ( maxShaderCount == rhs.maxShaderCount )
&& ( pushConstantRangeCount == rhs.pushConstantRangeCount )
&& ( pPushConstantRanges == rhs.pPushConstantRanges );
#endif
}
bool operator!=( IndirectExecutionSetShaderInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectExecutionSetShaderInfoEXT;
const void * pNext = {};
uint32_t shaderCount = {};
const VULKAN_HPP_NAMESPACE::ShaderEXT * pInitialShaders = {};
const VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderLayoutInfoEXT * pSetLayoutInfos = {};
uint32_t maxShaderCount = {};
uint32_t pushConstantRangeCount = {};
const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges = {};
};
template <>
struct CppType<StructureType, StructureType::eIndirectExecutionSetShaderInfoEXT>
{
using Type = IndirectExecutionSetShaderInfoEXT;
};
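  // Usage sketch (illustrative only, not part of the generated API): shaderCount counts both the
  // pInitialShaders and pSetLayoutInfos arrays, which is why the enhanced-mode constructor above requires
  // matching sizes when both are supplied. `vertShader`, `fragShader`, and the layout infos are hypothetical
  // objects created elsewhere.
  //
  //   std::array<vk::ShaderEXT, 2>                                shaders     = { vertShader, fragShader };
  //   std::array<vk::IndirectExecutionSetShaderLayoutInfoEXT, 2> layoutInfos = { /* one per shader */ };
  //   vk::IndirectExecutionSetShaderInfoEXT shaderInfo( shaders, layoutInfos, 16 /*maxShaderCount*/ );
  // wrapper union for union VkIndirectExecutionSetInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectExecutionSetInfoEXT.html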
union IndirectExecutionSetInfoEXT
{
using NativeType = VkIndirectExecutionSetInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetInfoEXT( const VULKAN_HPP_NAMESPACE::IndirectExecutionSetPipelineInfoEXT * pPipelineInfo_ = {} )
: pPipelineInfo( pPipelineInfo_ )
{}
VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetInfoEXT( const VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderInfoEXT * pShaderInfo_ )
: pShaderInfo( pShaderInfo_ )
{}
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS )
VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetInfoEXT & setPPipelineInfo( const VULKAN_HPP_NAMESPACE::IndirectExecutionSetPipelineInfoEXT * pPipelineInfo_ ) VULKAN_HPP_NOEXCEPT
{
pPipelineInfo = pPipelineInfo_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetInfoEXT & setPShaderInfo( const VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderInfoEXT * pShaderInfo_ ) VULKAN_HPP_NOEXCEPT
{
pShaderInfo = pShaderInfo_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkIndirectExecutionSetInfoEXT const &() const
{
return *reinterpret_cast<const VkIndirectExecutionSetInfoEXT*>( this );
}
operator VkIndirectExecutionSetInfoEXT &()
{
return *reinterpret_cast<VkIndirectExecutionSetInfoEXT*>( this );
}
#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
const VULKAN_HPP_NAMESPACE::IndirectExecutionSetPipelineInfoEXT * pPipelineInfo;
const VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderInfoEXT * pShaderInfo;
#else
const VkIndirectExecutionSetPipelineInfoEXT * pPipelineInfo;
const VkIndirectExecutionSetShaderInfoEXT * pShaderInfo;
#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
};
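  // Usage sketch (illustrative only, not part of the generated API): the union carries exactly one of the
  // two info pointers; which member is active is selected by the `type` member of the
  // IndirectExecutionSetCreateInfoEXT below. `pipelineInfo` and `shaderInfo` are the hypothetical objects
  // from the sketches above.
  //
  //   vk::IndirectExecutionSetInfoEXT info( &pipelineInfo );  // active member: pPipelineInfo
  //   info.setPShaderInfo( &shaderInfo );                     // re-targets the union to pShaderInfo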
// wrapper struct for struct VkIndirectExecutionSetCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectExecutionSetCreateInfoEXT.html
struct IndirectExecutionSetCreateInfoEXT
{
using NativeType = VkIndirectExecutionSetCreateInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectExecutionSetCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetCreateInfoEXT(VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoTypeEXT type_ = VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoTypeEXT::ePipelines, VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoEXT info_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, type{ type_ }, info{ info_ }
{}
VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetCreateInfoEXT( IndirectExecutionSetCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
IndirectExecutionSetCreateInfoEXT( VkIndirectExecutionSetCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: IndirectExecutionSetCreateInfoEXT( *reinterpret_cast<IndirectExecutionSetCreateInfoEXT const *>( &rhs ) )
{}
IndirectExecutionSetCreateInfoEXT & operator=( IndirectExecutionSetCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
IndirectExecutionSetCreateInfoEXT & operator=( VkIndirectExecutionSetCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IndirectExecutionSetCreateInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetCreateInfoEXT & setType( VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoTypeEXT type_ ) VULKAN_HPP_NOEXCEPT
{
type = type_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetCreateInfoEXT & setInfo( VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoEXT const & info_ ) VULKAN_HPP_NOEXCEPT
{
info = info_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkIndirectExecutionSetCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkIndirectExecutionSetCreateInfoEXT*>( this );
}
operator VkIndirectExecutionSetCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkIndirectExecutionSetCreateInfoEXT*>( this );
}
operator VkIndirectExecutionSetCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkIndirectExecutionSetCreateInfoEXT*>( this );
}
operator VkIndirectExecutionSetCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkIndirectExecutionSetCreateInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoTypeEXT const &, VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoEXT const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, type, info );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectExecutionSetCreateInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoTypeEXT type = VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoTypeEXT::ePipelines;
VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoEXT info = {};
};
template <>
struct CppType<StructureType, StructureType::eIndirectExecutionSetCreateInfoEXT>
{
using Type = IndirectExecutionSetCreateInfoEXT;
};
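  // Usage sketch (illustrative only, not part of the generated API): the union's converting constructor is
  // implicit, so a pointer to the matching info struct can be passed directly; `type` must agree with the
  // union member actually set (ePipelines here, eShaderObjects for the shader-object path).
  //
  //   vk::IndirectExecutionSetCreateInfoEXT createInfo( vk::IndirectExecutionSetInfoTypeEXT::ePipelines, &pipelineInfo );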
// wrapper struct for struct VkInitializePerformanceApiInfoINTEL, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkInitializePerformanceApiInfoINTEL.html
struct InitializePerformanceApiInfoINTEL
{
using NativeType = VkInitializePerformanceApiInfoINTEL;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eInitializePerformanceApiInfoINTEL;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR InitializePerformanceApiInfoINTEL(void * pUserData_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pUserData{ pUserData_ }
{}
VULKAN_HPP_CONSTEXPR InitializePerformanceApiInfoINTEL( InitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
InitializePerformanceApiInfoINTEL( VkInitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
: InitializePerformanceApiInfoINTEL( *reinterpret_cast<InitializePerformanceApiInfoINTEL const *>( &rhs ) )
{}
InitializePerformanceApiInfoINTEL & operator=( InitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
InitializePerformanceApiInfoINTEL & operator=( VkInitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 InitializePerformanceApiInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 InitializePerformanceApiInfoINTEL & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT
{
pUserData = pUserData_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkInitializePerformanceApiInfoINTEL const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkInitializePerformanceApiInfoINTEL*>( this );
}
operator VkInitializePerformanceApiInfoINTEL &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkInitializePerformanceApiInfoINTEL*>( this );
}
operator VkInitializePerformanceApiInfoINTEL const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkInitializePerformanceApiInfoINTEL*>( this );
}
operator VkInitializePerformanceApiInfoINTEL *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkInitializePerformanceApiInfoINTEL*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, void * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pUserData );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( InitializePerformanceApiInfoINTEL const & ) const = default;
#else
bool operator==( InitializePerformanceApiInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pUserData == rhs.pUserData );
#endif
}
bool operator!=( InitializePerformanceApiInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eInitializePerformanceApiInfoINTEL;
const void * pNext = {};
void * pUserData = {};
};
template <>
struct CppType<StructureType, StructureType::eInitializePerformanceApiInfoINTEL>
{
using Type = InitializePerformanceApiInfoINTEL;
};
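  // Usage sketch (illustrative only, not part of the generated API): pUserData is an opaque pointer handed
  // back by the INTEL performance-query API; `MyContext` is a hypothetical user-defined type. The struct is
  // consumed by vkInitializePerformanceApiINTEL.
  //
  //   MyContext ctx;
  //   vk::InitializePerformanceApiInfoINTEL initInfo( &ctx );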
// wrapper struct for struct VkInputAttachmentAspectReference, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkInputAttachmentAspectReference.html
struct InputAttachmentAspectReference
{
using NativeType = VkInputAttachmentAspectReference;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR InputAttachmentAspectReference(uint32_t subpass_ = {}, uint32_t inputAttachmentIndex_ = {}, VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}) VULKAN_HPP_NOEXCEPT
: subpass{ subpass_ }, inputAttachmentIndex{ inputAttachmentIndex_ }, aspectMask{ aspectMask_ }
{}
VULKAN_HPP_CONSTEXPR InputAttachmentAspectReference( InputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT = default;
InputAttachmentAspectReference( VkInputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT
: InputAttachmentAspectReference( *reinterpret_cast<InputAttachmentAspectReference const *>( &rhs ) )
{}
InputAttachmentAspectReference & operator=( InputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
InputAttachmentAspectReference & operator=( VkInputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 InputAttachmentAspectReference & setSubpass( uint32_t subpass_ ) VULKAN_HPP_NOEXCEPT
{
subpass = subpass_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 InputAttachmentAspectReference & setInputAttachmentIndex( uint32_t inputAttachmentIndex_ ) VULKAN_HPP_NOEXCEPT
{
inputAttachmentIndex = inputAttachmentIndex_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 InputAttachmentAspectReference & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
{
aspectMask = aspectMask_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkInputAttachmentAspectReference const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkInputAttachmentAspectReference*>( this );
}
operator VkInputAttachmentAspectReference &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkInputAttachmentAspectReference*>( this );
}
operator VkInputAttachmentAspectReference const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkInputAttachmentAspectReference*>( this );
}
operator VkInputAttachmentAspectReference *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkInputAttachmentAspectReference*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ImageAspectFlags const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( subpass, inputAttachmentIndex, aspectMask );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( InputAttachmentAspectReference const & ) const = default;
#else
bool operator==( InputAttachmentAspectReference const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( subpass == rhs.subpass )
&& ( inputAttachmentIndex == rhs.inputAttachmentIndex )
&& ( aspectMask == rhs.aspectMask );
#endif
}
bool operator!=( InputAttachmentAspectReference const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
uint32_t subpass = {};
uint32_t inputAttachmentIndex = {};
VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
};
using InputAttachmentAspectReferenceKHR = InputAttachmentAspectReference;
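  // Usage sketch (illustrative only, not part of the generated API): an aspect reference restricting input
  // attachment 0 of subpass 0 to its color aspect; such references are consumed through
  // RenderPassInputAttachmentAspectCreateInfo chained into a render pass.
  //
  //   vk::InputAttachmentAspectReference aspectRef( 0 /*subpass*/, 0 /*inputAttachmentIndex*/, vk::ImageAspectFlagBits::eColor );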
// wrapper struct for struct VkInstanceCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkInstanceCreateInfo.html
struct InstanceCreateInfo
{
using NativeType = VkInstanceCreateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eInstanceCreateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR InstanceCreateInfo(VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_ = {}, const VULKAN_HPP_NAMESPACE::ApplicationInfo * pApplicationInfo_ = {}, uint32_t enabledLayerCount_ = {}, const char * const * ppEnabledLayerNames_ = {}, uint32_t enabledExtensionCount_ = {}, const char * const * ppEnabledExtensionNames_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, pApplicationInfo{ pApplicationInfo_ }, enabledLayerCount{ enabledLayerCount_ }, ppEnabledLayerNames{ ppEnabledLayerNames_ }, enabledExtensionCount{ enabledExtensionCount_ }, ppEnabledExtensionNames{ ppEnabledExtensionNames_ }
{}
VULKAN_HPP_CONSTEXPR InstanceCreateInfo( InstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
InstanceCreateInfo( VkInstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: InstanceCreateInfo( *reinterpret_cast<InstanceCreateInfo const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
InstanceCreateInfo( VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_, const VULKAN_HPP_NAMESPACE::ApplicationInfo * pApplicationInfo_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledLayerNames_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledExtensionNames_ = {}, const void * pNext_ = nullptr )
: pNext( pNext_ ), flags( flags_ ), pApplicationInfo( pApplicationInfo_ ), enabledLayerCount( static_cast<uint32_t>( pEnabledLayerNames_.size() ) ), ppEnabledLayerNames( pEnabledLayerNames_.data() ), enabledExtensionCount( static_cast<uint32_t>( pEnabledExtensionNames_.size() ) ), ppEnabledExtensionNames( pEnabledExtensionNames_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
InstanceCreateInfo & operator=( InstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
InstanceCreateInfo & operator=( VkInstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::InstanceCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setPApplicationInfo( const VULKAN_HPP_NAMESPACE::ApplicationInfo * pApplicationInfo_ ) VULKAN_HPP_NOEXCEPT
{
pApplicationInfo = pApplicationInfo_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setEnabledLayerCount( uint32_t enabledLayerCount_ ) VULKAN_HPP_NOEXCEPT
{
enabledLayerCount = enabledLayerCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setPpEnabledLayerNames( const char * const * ppEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT
{
ppEnabledLayerNames = ppEnabledLayerNames_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
InstanceCreateInfo & setPEnabledLayerNames( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT
{
enabledLayerCount = static_cast<uint32_t>( pEnabledLayerNames_.size() );
ppEnabledLayerNames = pEnabledLayerNames_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setEnabledExtensionCount( uint32_t enabledExtensionCount_ ) VULKAN_HPP_NOEXCEPT
{
enabledExtensionCount = enabledExtensionCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setPpEnabledExtensionNames( const char * const * ppEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT
{
ppEnabledExtensionNames = ppEnabledExtensionNames_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
InstanceCreateInfo & setPEnabledExtensionNames( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT
{
enabledExtensionCount = static_cast<uint32_t>( pEnabledExtensionNames_.size() );
ppEnabledExtensionNames = pEnabledExtensionNames_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkInstanceCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkInstanceCreateInfo*>( this );
}
operator VkInstanceCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkInstanceCreateInfo*>( this );
}
operator VkInstanceCreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkInstanceCreateInfo*>( this );
}
operator VkInstanceCreateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkInstanceCreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::InstanceCreateFlags const &, const VULKAN_HPP_NAMESPACE::ApplicationInfo * const &, uint32_t const &, const char * const * const &, uint32_t const &, const char * const * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, pApplicationInfo, enabledLayerCount, ppEnabledLayerNames, enabledExtensionCount, ppEnabledExtensionNames );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
std::strong_ordering operator<=>( InstanceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) return cmp;
if ( auto cmp = pApplicationInfo <=> rhs.pApplicationInfo; cmp != 0 ) return cmp;
if ( auto cmp = enabledLayerCount <=> rhs.enabledLayerCount; cmp != 0 ) return cmp;
for ( size_t i = 0; i < enabledLayerCount; ++i )
{
if ( ppEnabledLayerNames[i] != rhs.ppEnabledLayerNames[i] )
if ( auto cmp = strcmp( ppEnabledLayerNames[i], rhs.ppEnabledLayerNames[i] ); cmp != 0 )
return cmp < 0 ? std::strong_ordering::less : std::strong_ordering::greater;
}
if ( auto cmp = enabledExtensionCount <=> rhs.enabledExtensionCount; cmp != 0 ) return cmp;
for ( size_t i = 0; i < enabledExtensionCount; ++i )
{
if ( ppEnabledExtensionNames[i] != rhs.ppEnabledExtensionNames[i] )
if ( auto cmp = strcmp( ppEnabledExtensionNames[i], rhs.ppEnabledExtensionNames[i] ); cmp != 0 )
return cmp < 0 ? std::strong_ordering::less : std::strong_ordering::greater;
}
return std::strong_ordering::equivalent;
}
#endif
bool operator==( InstanceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( pApplicationInfo == rhs.pApplicationInfo )
&& ( enabledLayerCount == rhs.enabledLayerCount )
&& std::equal( ppEnabledLayerNames, ppEnabledLayerNames + enabledLayerCount, rhs.ppEnabledLayerNames, []( char const * left, char const * right ) { return ( left == right ) || ( strcmp( left, right ) == 0 ); } )
&& ( enabledExtensionCount == rhs.enabledExtensionCount )
&& std::equal( ppEnabledExtensionNames, ppEnabledExtensionNames + enabledExtensionCount, rhs.ppEnabledExtensionNames, []( char const * left, char const * right ) { return ( left == right ) || ( strcmp( left, right ) == 0 ); } );
}
bool operator!=( InstanceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eInstanceCreateInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags = {};
const VULKAN_HPP_NAMESPACE::ApplicationInfo * pApplicationInfo = {};
uint32_t enabledLayerCount = {};
const char * const * ppEnabledLayerNames = {};
uint32_t enabledExtensionCount = {};
const char * const * ppEnabledExtensionNames = {};
};
template <>
struct CppType<StructureType, StructureType::eInstanceCreateInfo>
{
using Type = InstanceCreateInfo;
};
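  // Usage sketch (illustrative only, not part of the generated API; assumes the default exception-based
  // result handling): the enhanced-mode constructor derives enabledLayerCount and enabledExtensionCount from
  // the ArrayProxy arguments, and the strcmp-based comparisons above compare layer and extension names by
  // content rather than by pointer. The layer name shown is the standard Khronos validation layer.
  //
  //   vk::ApplicationInfo appInfo( "MyApp", 1, "MyEngine", 1, VK_API_VERSION_1_1 );
  //   std::array<char const *, 1> layers = { "VK_LAYER_KHRONOS_validation" };
  //   vk::InstanceCreateInfo instanceCreateInfo( {} /*flags*/, &appInfo, layers );
  //   vk::Instance instance = vk::createInstance( instanceCreateInfo );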
// wrapper struct for struct VkLatencySleepInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkLatencySleepInfoNV.html
struct LatencySleepInfoNV
{
using NativeType = VkLatencySleepInfoNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eLatencySleepInfoNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR LatencySleepInfoNV(VULKAN_HPP_NAMESPACE::Semaphore signalSemaphore_ = {}, uint64_t value_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, signalSemaphore{ signalSemaphore_ }, value{ value_ }
{}
VULKAN_HPP_CONSTEXPR LatencySleepInfoNV( LatencySleepInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
LatencySleepInfoNV( VkLatencySleepInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: LatencySleepInfoNV( *reinterpret_cast<LatencySleepInfoNV const *>( &rhs ) )
{}
LatencySleepInfoNV & operator=( LatencySleepInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
LatencySleepInfoNV & operator=( VkLatencySleepInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::LatencySleepInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 LatencySleepInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 LatencySleepInfoNV & setSignalSemaphore( VULKAN_HPP_NAMESPACE::Semaphore signalSemaphore_ ) VULKAN_HPP_NOEXCEPT
{
signalSemaphore = signalSemaphore_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 LatencySleepInfoNV & setValue( uint64_t value_ ) VULKAN_HPP_NOEXCEPT
{
value = value_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkLatencySleepInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkLatencySleepInfoNV*>( this );
}
operator VkLatencySleepInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkLatencySleepInfoNV*>( this );
}
operator VkLatencySleepInfoNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkLatencySleepInfoNV*>( this );
}
operator VkLatencySleepInfoNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkLatencySleepInfoNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Semaphore const &, uint64_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, signalSemaphore, value );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( LatencySleepInfoNV const & ) const = default;
#else
bool operator==( LatencySleepInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( signalSemaphore == rhs.signalSemaphore )
&& ( value == rhs.value );
#endif
}
bool operator!=( LatencySleepInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eLatencySleepInfoNV;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Semaphore signalSemaphore = {};
uint64_t value = {};
};
template <>
struct CppType<StructureType, StructureType::eLatencySleepInfoNV>
{
using Type = LatencySleepInfoNV;
};
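// Illustrative usage sketch for LatencySleepInfoNV (assumptions: the default `vk` namespace, a
// vk::Device `device`, a vk::SwapchainKHR `swapchain` with low-latency mode enabled, a timeline
// vk::Semaphore `semaphore`, and a frame counter `frameIndex`); the sleep signals `semaphore`
// to the given timeline value:
//
//   auto sleepInfo = vk::LatencySleepInfoNV{}.setSignalSemaphore( semaphore ).setValue( frameIndex );
//   device.latencySleepNV( swapchain, sleepInfo );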
// wrapper struct for struct VkLatencySleepModeInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkLatencySleepModeInfoNV.html
struct LatencySleepModeInfoNV
{
using NativeType = VkLatencySleepModeInfoNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eLatencySleepModeInfoNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR LatencySleepModeInfoNV(VULKAN_HPP_NAMESPACE::Bool32 lowLatencyMode_ = {}, VULKAN_HPP_NAMESPACE::Bool32 lowLatencyBoost_ = {}, uint32_t minimumIntervalUs_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, lowLatencyMode{ lowLatencyMode_ }, lowLatencyBoost{ lowLatencyBoost_ }, minimumIntervalUs{ minimumIntervalUs_ }
{}
VULKAN_HPP_CONSTEXPR LatencySleepModeInfoNV( LatencySleepModeInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
LatencySleepModeInfoNV( VkLatencySleepModeInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: LatencySleepModeInfoNV( *reinterpret_cast<LatencySleepModeInfoNV const *>( &rhs ) )
{}
LatencySleepModeInfoNV & operator=( LatencySleepModeInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
LatencySleepModeInfoNV & operator=( VkLatencySleepModeInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::LatencySleepModeInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 LatencySleepModeInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 LatencySleepModeInfoNV & setLowLatencyMode( VULKAN_HPP_NAMESPACE::Bool32 lowLatencyMode_ ) VULKAN_HPP_NOEXCEPT
{
lowLatencyMode = lowLatencyMode_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 LatencySleepModeInfoNV & setLowLatencyBoost( VULKAN_HPP_NAMESPACE::Bool32 lowLatencyBoost_ ) VULKAN_HPP_NOEXCEPT
{
lowLatencyBoost = lowLatencyBoost_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 LatencySleepModeInfoNV & setMinimumIntervalUs( uint32_t minimumIntervalUs_ ) VULKAN_HPP_NOEXCEPT
{
minimumIntervalUs = minimumIntervalUs_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkLatencySleepModeInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkLatencySleepModeInfoNV*>( this );
}
operator VkLatencySleepModeInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkLatencySleepModeInfoNV*>( this );
}
operator VkLatencySleepModeInfoNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkLatencySleepModeInfoNV*>( this );
}
operator VkLatencySleepModeInfoNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkLatencySleepModeInfoNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, lowLatencyMode, lowLatencyBoost, minimumIntervalUs );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( LatencySleepModeInfoNV const & ) const = default;
#else
bool operator==( LatencySleepModeInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( lowLatencyMode == rhs.lowLatencyMode )
&& ( lowLatencyBoost == rhs.lowLatencyBoost )
&& ( minimumIntervalUs == rhs.minimumIntervalUs );
#endif
}
bool operator!=( LatencySleepModeInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eLatencySleepModeInfoNV;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 lowLatencyMode = {};
VULKAN_HPP_NAMESPACE::Bool32 lowLatencyBoost = {};
uint32_t minimumIntervalUs = {};
};
template <>
struct CppType<StructureType, StructureType::eLatencySleepModeInfoNV>
{
using Type = LatencySleepModeInfoNV;
};
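// Illustrative usage sketch for LatencySleepModeInfoNV (assumes `device` and `swapchain` as in
// the LatencySleepInfoNV sketch above): enable low-latency mode with boost and no minimum
// frame interval:
//
//   auto modeInfo = vk::LatencySleepModeInfoNV{}
//                     .setLowLatencyMode( VK_TRUE )
//                     .setLowLatencyBoost( VK_TRUE )
//                     .setMinimumIntervalUs( 0 );
//   device.setLatencySleepModeNV( swapchain, modeInfo );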
// wrapper struct for struct VkLatencySubmissionPresentIdNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkLatencySubmissionPresentIdNV.html
struct LatencySubmissionPresentIdNV
{
using NativeType = VkLatencySubmissionPresentIdNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eLatencySubmissionPresentIdNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR LatencySubmissionPresentIdNV(uint64_t presentID_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, presentID{ presentID_ }
{}
VULKAN_HPP_CONSTEXPR LatencySubmissionPresentIdNV( LatencySubmissionPresentIdNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
LatencySubmissionPresentIdNV( VkLatencySubmissionPresentIdNV const & rhs ) VULKAN_HPP_NOEXCEPT
: LatencySubmissionPresentIdNV( *reinterpret_cast<LatencySubmissionPresentIdNV const *>( &rhs ) )
{}
LatencySubmissionPresentIdNV & operator=( LatencySubmissionPresentIdNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
LatencySubmissionPresentIdNV & operator=( VkLatencySubmissionPresentIdNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::LatencySubmissionPresentIdNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 LatencySubmissionPresentIdNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 LatencySubmissionPresentIdNV & setPresentID( uint64_t presentID_ ) VULKAN_HPP_NOEXCEPT
{
presentID = presentID_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkLatencySubmissionPresentIdNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkLatencySubmissionPresentIdNV*>( this );
}
operator VkLatencySubmissionPresentIdNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkLatencySubmissionPresentIdNV*>( this );
}
operator VkLatencySubmissionPresentIdNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkLatencySubmissionPresentIdNV*>( this );
}
operator VkLatencySubmissionPresentIdNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkLatencySubmissionPresentIdNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint64_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, presentID );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( LatencySubmissionPresentIdNV const & ) const = default;
#else
bool operator==( LatencySubmissionPresentIdNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( presentID == rhs.presentID );
#endif
}
bool operator!=( LatencySubmissionPresentIdNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eLatencySubmissionPresentIdNV;
const void * pNext = {};
uint64_t presentID = {};
};
template <>
struct CppType<StructureType, StructureType::eLatencySubmissionPresentIdNV>
{
using Type = LatencySubmissionPresentIdNV;
};
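// Illustrative sketch: LatencySubmissionPresentIdNV is chained into a queue submission via
// pNext so the driver can associate that submit with a later present (`frameId` is an
// assumption):
//
//   auto presentId  = vk::LatencySubmissionPresentIdNV{}.setPresentID( frameId );
//   auto submitInfo = vk::SubmitInfo{}.setPNext( &presentId );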
// wrapper struct for struct VkLatencySurfaceCapabilitiesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkLatencySurfaceCapabilitiesNV.html
struct LatencySurfaceCapabilitiesNV
{
using NativeType = VkLatencySurfaceCapabilitiesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eLatencySurfaceCapabilitiesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR LatencySurfaceCapabilitiesNV(uint32_t presentModeCount_ = {}, VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, presentModeCount{ presentModeCount_ }, pPresentModes{ pPresentModes_ }
{}
VULKAN_HPP_CONSTEXPR LatencySurfaceCapabilitiesNV( LatencySurfaceCapabilitiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
LatencySurfaceCapabilitiesNV( VkLatencySurfaceCapabilitiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: LatencySurfaceCapabilitiesNV( *reinterpret_cast<LatencySurfaceCapabilitiesNV const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
LatencySurfaceCapabilitiesNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::PresentModeKHR> const & presentModes_, const void * pNext_ = nullptr )
: pNext( pNext_ ), presentModeCount( static_cast<uint32_t>( presentModes_.size() ) ), pPresentModes( presentModes_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
LatencySurfaceCapabilitiesNV & operator=( LatencySurfaceCapabilitiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
LatencySurfaceCapabilitiesNV & operator=( VkLatencySurfaceCapabilitiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::LatencySurfaceCapabilitiesNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 LatencySurfaceCapabilitiesNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 LatencySurfaceCapabilitiesNV & setPresentModeCount( uint32_t presentModeCount_ ) VULKAN_HPP_NOEXCEPT
{
presentModeCount = presentModeCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 LatencySurfaceCapabilitiesNV & setPPresentModes( VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes_ ) VULKAN_HPP_NOEXCEPT
{
pPresentModes = pPresentModes_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
LatencySurfaceCapabilitiesNV & setPresentModes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::PresentModeKHR> const & presentModes_ ) VULKAN_HPP_NOEXCEPT
{
presentModeCount = static_cast<uint32_t>( presentModes_.size() );
pPresentModes = presentModes_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkLatencySurfaceCapabilitiesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkLatencySurfaceCapabilitiesNV*>( this );
}
operator VkLatencySurfaceCapabilitiesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkLatencySurfaceCapabilitiesNV*>( this );
}
operator VkLatencySurfaceCapabilitiesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkLatencySurfaceCapabilitiesNV*>( this );
}
operator VkLatencySurfaceCapabilitiesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkLatencySurfaceCapabilitiesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::PresentModeKHR * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, presentModeCount, pPresentModes );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( LatencySurfaceCapabilitiesNV const & ) const = default;
#else
bool operator==( LatencySurfaceCapabilitiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( presentModeCount == rhs.presentModeCount )
&& ( pPresentModes == rhs.pPresentModes );
#endif
}
bool operator!=( LatencySurfaceCapabilitiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eLatencySurfaceCapabilitiesNV;
const void * pNext = {};
uint32_t presentModeCount = {};
VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes = {};
};
template <>
struct CppType<StructureType, StructureType::eLatencySurfaceCapabilitiesNV>
{
using Type = LatencySurfaceCapabilitiesNV;
};
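// Illustrative sketch: LatencySurfaceCapabilitiesNV is an output structure, chained behind
// vk::SurfaceCapabilities2KHR to query which present modes support latency control
// (`physicalDevice` and `surfaceInfo` are assumptions):
//
//   std::array<vk::PresentModeKHR, 8> modes{};
//   auto latencyCaps = vk::LatencySurfaceCapabilitiesNV{}.setPresentModes( modes );
//   vk::SurfaceCapabilities2KHR caps{};
//   caps.pNext = &latencyCaps;
//   auto result = physicalDevice.getSurfaceCapabilities2KHR( &surfaceInfo, &caps );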
// wrapper struct for struct VkLayerProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkLayerProperties.html
struct LayerProperties
{
using NativeType = VkLayerProperties;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 LayerProperties(std::array<char,VK_MAX_EXTENSION_NAME_SIZE> const & layerName_ = {}, uint32_t specVersion_ = {}, uint32_t implementationVersion_ = {}, std::array<char,VK_MAX_DESCRIPTION_SIZE> const & description_ = {}) VULKAN_HPP_NOEXCEPT
: layerName{ layerName_ }, specVersion{ specVersion_ }, implementationVersion{ implementationVersion_ }, description{ description_ }
{}
VULKAN_HPP_CONSTEXPR_14 LayerProperties( LayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
LayerProperties( VkLayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT
: LayerProperties( *reinterpret_cast<LayerProperties const *>( &rhs ) )
{}
LayerProperties & operator=( LayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
LayerProperties & operator=( VkLayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::LayerProperties const *>( &rhs );
return *this;
}
operator VkLayerProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkLayerProperties*>( this );
}
operator VkLayerProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkLayerProperties*>( this );
}
operator VkLayerProperties const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkLayerProperties*>( this );
}
operator VkLayerProperties *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkLayerProperties*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( layerName, specVersion, implementationVersion, description );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
std::strong_ordering operator<=>( LayerProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = strcmp( layerName, rhs.layerName ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
if ( auto cmp = specVersion <=> rhs.specVersion; cmp != 0 ) return cmp;
if ( auto cmp = implementationVersion <=> rhs.implementationVersion; cmp != 0 ) return cmp;
if ( auto cmp = strcmp( description, rhs.description ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
return std::strong_ordering::equivalent;
}
#endif
bool operator==( LayerProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( strcmp( layerName, rhs.layerName ) == 0 )
&& ( specVersion == rhs.specVersion )
&& ( implementationVersion == rhs.implementationVersion )
&& ( strcmp( description, rhs.description ) == 0 );
}
bool operator!=( LayerProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> layerName = {};
uint32_t specVersion = {};
uint32_t implementationVersion = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
};
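// Illustrative usage sketch for LayerProperties (assumes the default `vk` namespace and
// <iostream>): enumerate the instance layers and print their names and versions:
//
//   for ( vk::LayerProperties const & lp : vk::enumerateInstanceLayerProperties() )
//     std::cout << lp.layerName.data() << " spec " << lp.specVersion
//               << " impl " << lp.implementationVersion << '\n';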
// wrapper struct for struct VkLayerSettingEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkLayerSettingEXT.html
struct LayerSettingEXT
{
using NativeType = VkLayerSettingEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR LayerSettingEXT(const char * pLayerName_ = {}, const char * pSettingName_ = {}, VULKAN_HPP_NAMESPACE::LayerSettingTypeEXT type_ = VULKAN_HPP_NAMESPACE::LayerSettingTypeEXT::eBool32, uint32_t valueCount_ = {}, const void * pValues_ = {}) VULKAN_HPP_NOEXCEPT
: pLayerName{ pLayerName_ }, pSettingName{ pSettingName_ }, type{ type_ }, valueCount{ valueCount_ }, pValues{ pValues_ }
{}
VULKAN_HPP_CONSTEXPR LayerSettingEXT( LayerSettingEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
LayerSettingEXT( VkLayerSettingEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: LayerSettingEXT( *reinterpret_cast<LayerSettingEXT const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
// NOTE: you need to provide the type explicitly, because VULKAN_HPP_NAMESPACE::Bool32 and uint32_t are indistinguishable by overload resolution!
LayerSettingEXT( char const * pLayerName_, char const * pSettingName_, VULKAN_HPP_NAMESPACE::LayerSettingTypeEXT type_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const int32_t> const & values_ )
: pLayerName( pLayerName_ )
, pSettingName( pSettingName_ )
, type( type_ )
, valueCount( static_cast<uint32_t>( values_.size() ) )
, pValues( values_.data() )
{
VULKAN_HPP_ASSERT( VULKAN_HPP_NAMESPACE::isSameType<int32_t>(type) );
}
LayerSettingEXT( char const * pLayerName_, char const * pSettingName_, VULKAN_HPP_NAMESPACE::LayerSettingTypeEXT type_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const int64_t> const & values_ )
: pLayerName( pLayerName_ )
, pSettingName( pSettingName_ )
, type( type_ )
, valueCount( static_cast<uint32_t>( values_.size() ) )
, pValues( values_.data() )
{
VULKAN_HPP_ASSERT( VULKAN_HPP_NAMESPACE::isSameType<int64_t>(type) );
}
LayerSettingEXT( char const * pLayerName_, char const * pSettingName_, VULKAN_HPP_NAMESPACE::LayerSettingTypeEXT type_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & values_ )
: pLayerName( pLayerName_ )
, pSettingName( pSettingName_ )
, type( type_ )
, valueCount( static_cast<uint32_t>( values_.size() ) )
, pValues( values_.data() )
{
VULKAN_HPP_ASSERT( VULKAN_HPP_NAMESPACE::isSameType<uint32_t>(type) );
}
LayerSettingEXT( char const * pLayerName_, char const * pSettingName_, VULKAN_HPP_NAMESPACE::LayerSettingTypeEXT type_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & values_ )
: pLayerName( pLayerName_ )
, pSettingName( pSettingName_ )
, type( type_ )
, valueCount( static_cast<uint32_t>( values_.size() ) )
, pValues( values_.data() )
{
VULKAN_HPP_ASSERT( VULKAN_HPP_NAMESPACE::isSameType<uint64_t>(type) );
}
LayerSettingEXT( char const * pLayerName_, char const * pSettingName_, VULKAN_HPP_NAMESPACE::LayerSettingTypeEXT type_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const float> const & values_ )
: pLayerName( pLayerName_ )
, pSettingName( pSettingName_ )
, type( type_ )
, valueCount( static_cast<uint32_t>( values_.size() ) )
, pValues( values_.data() )
{
VULKAN_HPP_ASSERT( VULKAN_HPP_NAMESPACE::isSameType<float>(type) );
}
LayerSettingEXT( char const * pLayerName_, char const * pSettingName_, VULKAN_HPP_NAMESPACE::LayerSettingTypeEXT type_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const double> const & values_ )
: pLayerName( pLayerName_ )
, pSettingName( pSettingName_ )
, type( type_ )
, valueCount( static_cast<uint32_t>( values_.size() ) )
, pValues( values_.data() )
{
VULKAN_HPP_ASSERT( VULKAN_HPP_NAMESPACE::isSameType<double>(type) );
}
LayerSettingEXT( char const * pLayerName_, char const * pSettingName_, VULKAN_HPP_NAMESPACE::LayerSettingTypeEXT type_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char *> const & values_ )
: pLayerName( pLayerName_ )
, pSettingName( pSettingName_ )
, type( type_ )
, valueCount( static_cast<uint32_t>( values_.size() ) )
, pValues( values_.data() )
{
VULKAN_HPP_ASSERT( VULKAN_HPP_NAMESPACE::isSameType<char *>(type) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
LayerSettingEXT & operator=( LayerSettingEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
LayerSettingEXT & operator=( VkLayerSettingEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::LayerSettingEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 LayerSettingEXT & setPLayerName( const char * pLayerName_ ) VULKAN_HPP_NOEXCEPT
{
pLayerName = pLayerName_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 LayerSettingEXT & setPSettingName( const char * pSettingName_ ) VULKAN_HPP_NOEXCEPT
{
pSettingName = pSettingName_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 LayerSettingEXT & setType( VULKAN_HPP_NAMESPACE::LayerSettingTypeEXT type_ ) VULKAN_HPP_NOEXCEPT
{
type = type_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 LayerSettingEXT & setValueCount( uint32_t valueCount_ ) VULKAN_HPP_NOEXCEPT
{
valueCount = valueCount_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
LayerSettingEXT & setValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const int32_t> const & values_ ) VULKAN_HPP_NOEXCEPT
{
valueCount = static_cast<uint32_t>( values_.size() );
pValues = values_.data();
return *this;
}
LayerSettingEXT & setValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const int64_t> const & values_ ) VULKAN_HPP_NOEXCEPT
{
valueCount = static_cast<uint32_t>( values_.size() );
pValues = values_.data();
return *this;
}
LayerSettingEXT & setValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & values_ ) VULKAN_HPP_NOEXCEPT
{
valueCount = static_cast<uint32_t>( values_.size() );
pValues = values_.data();
return *this;
}
LayerSettingEXT & setValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & values_ ) VULKAN_HPP_NOEXCEPT
{
valueCount = static_cast<uint32_t>( values_.size() );
pValues = values_.data();
return *this;
}
LayerSettingEXT & setValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const float> const & values_ ) VULKAN_HPP_NOEXCEPT
{
valueCount = static_cast<uint32_t>( values_.size() );
pValues = values_.data();
return *this;
}
LayerSettingEXT & setValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const double> const & values_ ) VULKAN_HPP_NOEXCEPT
{
valueCount = static_cast<uint32_t>( values_.size() );
pValues = values_.data();
return *this;
}
LayerSettingEXT & setValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char *> const & values_ ) VULKAN_HPP_NOEXCEPT
{
valueCount = static_cast<uint32_t>( values_.size() );
pValues = values_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkLayerSettingEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkLayerSettingEXT*>( this );
}
operator VkLayerSettingEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkLayerSettingEXT*>( this );
}
operator VkLayerSettingEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkLayerSettingEXT*>( this );
}
operator VkLayerSettingEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkLayerSettingEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<const char * const &, const char * const &, VULKAN_HPP_NAMESPACE::LayerSettingTypeEXT const &, uint32_t const &, const void * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( pLayerName, pSettingName, type, valueCount, pValues );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
std::strong_ordering operator<=>( LayerSettingEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( pLayerName != rhs.pLayerName )
{
  if ( auto cmp = strcmp( pLayerName, rhs.pLayerName ); cmp != 0 )
    return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
}
if ( pSettingName != rhs.pSettingName )
{
  if ( auto cmp = strcmp( pSettingName, rhs.pSettingName ); cmp != 0 )
    return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
}
if ( auto cmp = type <=> rhs.type; cmp != 0 ) return cmp;
if ( auto cmp = valueCount <=> rhs.valueCount; cmp != 0 ) return cmp;
if ( auto cmp = pValues <=> rhs.pValues; cmp != 0 ) return cmp;
return std::strong_ordering::equivalent;
}
#endif
bool operator==( LayerSettingEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( ( pLayerName == rhs.pLayerName ) || ( strcmp( pLayerName, rhs.pLayerName ) == 0 ) )
&& ( ( pSettingName == rhs.pSettingName ) || ( strcmp( pSettingName, rhs.pSettingName ) == 0 ) )
&& ( type == rhs.type )
&& ( valueCount == rhs.valueCount )
&& ( pValues == rhs.pValues );
}
bool operator!=( LayerSettingEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
const char * pLayerName = {};
const char * pSettingName = {};
VULKAN_HPP_NAMESPACE::LayerSettingTypeEXT type = VULKAN_HPP_NAMESPACE::LayerSettingTypeEXT::eBool32;
uint32_t valueCount = {};
const void * pValues = {};
};
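// Illustrative usage sketch of the typed convenience constructors above; the layer and setting
// names are assumptions, and the explicit type tag is required because Bool32 and uint32_t are
// indistinguishable (see the NOTE in the constructors):
//
//   uint32_t enableSync[] = { VK_TRUE };
//   vk::LayerSettingEXT setting( "VK_LAYER_KHRONOS_validation", "validate_sync",
//                                vk::LayerSettingTypeEXT::eBool32, enableSync );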
// wrapper struct for struct VkLayerSettingsCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkLayerSettingsCreateInfoEXT.html
struct LayerSettingsCreateInfoEXT
{
using NativeType = VkLayerSettingsCreateInfoEXT;
static const bool allowDuplicate = true;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eLayerSettingsCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR LayerSettingsCreateInfoEXT(uint32_t settingCount_ = {}, const VULKAN_HPP_NAMESPACE::LayerSettingEXT * pSettings_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, settingCount{ settingCount_ }, pSettings{ pSettings_ }
{}
VULKAN_HPP_CONSTEXPR LayerSettingsCreateInfoEXT( LayerSettingsCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
LayerSettingsCreateInfoEXT( VkLayerSettingsCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: LayerSettingsCreateInfoEXT( *reinterpret_cast<LayerSettingsCreateInfoEXT const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
LayerSettingsCreateInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::LayerSettingEXT> const & settings_, const void * pNext_ = nullptr )
: pNext( pNext_ ), settingCount( static_cast<uint32_t>( settings_.size() ) ), pSettings( settings_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
LayerSettingsCreateInfoEXT & operator=( LayerSettingsCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
LayerSettingsCreateInfoEXT & operator=( VkLayerSettingsCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::LayerSettingsCreateInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 LayerSettingsCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 LayerSettingsCreateInfoEXT & setSettingCount( uint32_t settingCount_ ) VULKAN_HPP_NOEXCEPT
{
settingCount = settingCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 LayerSettingsCreateInfoEXT & setPSettings( const VULKAN_HPP_NAMESPACE::LayerSettingEXT * pSettings_ ) VULKAN_HPP_NOEXCEPT
{
pSettings = pSettings_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
LayerSettingsCreateInfoEXT & setSettings( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::LayerSettingEXT> const & settings_ ) VULKAN_HPP_NOEXCEPT
{
settingCount = static_cast<uint32_t>( settings_.size() );
pSettings = settings_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkLayerSettingsCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkLayerSettingsCreateInfoEXT*>( this );
}
operator VkLayerSettingsCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkLayerSettingsCreateInfoEXT*>( this );
}
operator VkLayerSettingsCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkLayerSettingsCreateInfoEXT*>( this );
}
operator VkLayerSettingsCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkLayerSettingsCreateInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::LayerSettingEXT * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, settingCount, pSettings );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( LayerSettingsCreateInfoEXT const & ) const = default;
#else
bool operator==( LayerSettingsCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( settingCount == rhs.settingCount )
&& ( pSettings == rhs.pSettings );
#endif
}
bool operator!=( LayerSettingsCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eLayerSettingsCreateInfoEXT;
const void * pNext = {};
uint32_t settingCount = {};
const VULKAN_HPP_NAMESPACE::LayerSettingEXT * pSettings = {};
};
template <>
struct CppType<StructureType, StructureType::eLayerSettingsCreateInfoEXT>
{
using Type = LayerSettingsCreateInfoEXT;
};
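// Illustrative sketch: the settings are applied by chaining this struct into instance creation
// via pNext (`setting` as in the LayerSettingEXT sketch above):
//
//   auto settingsInfo = vk::LayerSettingsCreateInfoEXT{}.setSettings( setting );
//   auto instanceInfo = vk::InstanceCreateInfo{}.setPNext( &settingsInfo );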
#if defined( VK_USE_PLATFORM_MACOS_MVK )
// wrapper struct for struct VkMacOSSurfaceCreateInfoMVK, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMacOSSurfaceCreateInfoMVK.html
struct MacOSSurfaceCreateInfoMVK
{
using NativeType = VkMacOSSurfaceCreateInfoMVK;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMacosSurfaceCreateInfoMVK;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR MacOSSurfaceCreateInfoMVK(VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags_ = {}, const void * pView_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, pView{ pView_ }
{}
VULKAN_HPP_CONSTEXPR MacOSSurfaceCreateInfoMVK( MacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MacOSSurfaceCreateInfoMVK( VkMacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
: MacOSSurfaceCreateInfoMVK( *reinterpret_cast<MacOSSurfaceCreateInfoMVK const *>( &rhs ) )
{}
MacOSSurfaceCreateInfoMVK & operator=( MacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
MacOSSurfaceCreateInfoMVK & operator=( VkMacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 MacOSSurfaceCreateInfoMVK & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MacOSSurfaceCreateInfoMVK & setFlags( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MacOSSurfaceCreateInfoMVK & setPView( const void * pView_ ) VULKAN_HPP_NOEXCEPT
{
pView = pView_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkMacOSSurfaceCreateInfoMVK const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK*>( this );
}
operator VkMacOSSurfaceCreateInfoMVK &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMacOSSurfaceCreateInfoMVK*>( this );
}
operator VkMacOSSurfaceCreateInfoMVK const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK*>( this );
}
operator VkMacOSSurfaceCreateInfoMVK *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkMacOSSurfaceCreateInfoMVK*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK const &, const void * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, pView );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( MacOSSurfaceCreateInfoMVK const & ) const = default;
#else
bool operator==( MacOSSurfaceCreateInfoMVK const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( pView == rhs.pView );
#endif
}
bool operator!=( MacOSSurfaceCreateInfoMVK const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMacosSurfaceCreateInfoMVK;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags = {};
const void * pView = {};
};
template <>
struct CppType<StructureType, StructureType::eMacosSurfaceCreateInfoMVK>
{
using Type = MacOSSurfaceCreateInfoMVK;
};
#endif /*VK_USE_PLATFORM_MACOS_MVK*/
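// Illustrative sketch for MacOSSurfaceCreateInfoMVK (macOS only; assumes a vk::Instance
// `instance` and an NSView-backed `view` pointer whose layer is a CAMetalLayer):
//
//   auto surfaceInfo = vk::MacOSSurfaceCreateInfoMVK{}.setPView( view );
//   vk::SurfaceKHR surface = instance.createMacOSSurfaceMVK( surfaceInfo );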
// wrapper struct for struct VkMappedMemoryRange, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMappedMemoryRange.html
struct MappedMemoryRange
{
using NativeType = VkMappedMemoryRange;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMappedMemoryRange;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR MappedMemoryRange(VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, memory{ memory_ }, offset{ offset_ }, size{ size_ }
{}
VULKAN_HPP_CONSTEXPR MappedMemoryRange( MappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MappedMemoryRange( VkMappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT
: MappedMemoryRange( *reinterpret_cast<MappedMemoryRange const *>( &rhs ) )
{}
MappedMemoryRange & operator=( MappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
MappedMemoryRange & operator=( VkMappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MappedMemoryRange const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 MappedMemoryRange & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MappedMemoryRange & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
{
memory = memory_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MappedMemoryRange & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
{
offset = offset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MappedMemoryRange & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
{
size = size_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkMappedMemoryRange const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMappedMemoryRange*>( this );
}
operator VkMappedMemoryRange &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMappedMemoryRange*>( this );
}
operator VkMappedMemoryRange const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkMappedMemoryRange*>( this );
}
operator VkMappedMemoryRange *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkMappedMemoryRange*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceMemory const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, memory, offset, size );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( MappedMemoryRange const & ) const = default;
#else
bool operator==( MappedMemoryRange const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( memory == rhs.memory )
&& ( offset == rhs.offset )
&& ( size == rhs.size );
#endif
}
bool operator!=( MappedMemoryRange const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMappedMemoryRange;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
VULKAN_HPP_NAMESPACE::DeviceSize size = {};
};
template <>
struct CppType<StructureType, StructureType::eMappedMemoryRange>
{
using Type = MappedMemoryRange;
};
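// Illustrative usage sketch for MappedMemoryRange (assumes a vk::Device `device` and a
// non-coherent vk::DeviceMemory `memory` that is currently mapped): flush the whole mapping
// so host writes become visible to the device:
//
//   auto range = vk::MappedMemoryRange{}
//                  .setMemory( memory )
//                  .setOffset( 0 )
//                  .setSize( VK_WHOLE_SIZE );
//   device.flushMappedMemoryRanges( range );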
// wrapper struct for struct VkMemoryAllocateFlagsInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryAllocateFlagsInfo.html
struct MemoryAllocateFlagsInfo
{
using NativeType = VkMemoryAllocateFlagsInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryAllocateFlagsInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR MemoryAllocateFlagsInfo(VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags_ = {}, uint32_t deviceMask_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, deviceMask{ deviceMask_ }
{}
VULKAN_HPP_CONSTEXPR MemoryAllocateFlagsInfo( MemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MemoryAllocateFlagsInfo( VkMemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: MemoryAllocateFlagsInfo( *reinterpret_cast<MemoryAllocateFlagsInfo const *>( &rhs ) )
{}
MemoryAllocateFlagsInfo & operator=( MemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
MemoryAllocateFlagsInfo & operator=( VkMemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 MemoryAllocateFlagsInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MemoryAllocateFlagsInfo & setFlags( VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MemoryAllocateFlagsInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
{
deviceMask = deviceMask_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkMemoryAllocateFlagsInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMemoryAllocateFlagsInfo*>( this );
}
operator VkMemoryAllocateFlagsInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMemoryAllocateFlagsInfo*>( this );
}
operator VkMemoryAllocateFlagsInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkMemoryAllocateFlagsInfo*>( this );
}
operator VkMemoryAllocateFlagsInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkMemoryAllocateFlagsInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::MemoryAllocateFlags const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, deviceMask );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( MemoryAllocateFlagsInfo const & ) const = default;
#else
bool operator==( MemoryAllocateFlagsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( deviceMask == rhs.deviceMask );
#endif
}
bool operator!=( MemoryAllocateFlagsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryAllocateFlagsInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags = {};
uint32_t deviceMask = {};
};
template <>
struct CppType<StructureType, StructureType::eMemoryAllocateFlagsInfo>
{
using Type = MemoryAllocateFlagsInfo;
};
using MemoryAllocateFlagsInfoKHR = MemoryAllocateFlagsInfo;
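// Illustrative sketch: MemoryAllocateFlagsInfo is chained into a vk::MemoryAllocateInfo, e.g.
// to request a device address for the allocation (`size` and `typeIndex` are assumptions):
//
//   auto flagsInfo = vk::MemoryAllocateFlagsInfo{}
//                      .setFlags( vk::MemoryAllocateFlagBits::eDeviceAddress );
//   auto allocInfo = vk::MemoryAllocateInfo{}
//                      .setAllocationSize( size )
//                      .setMemoryTypeIndex( typeIndex )
//                      .setPNext( &flagsInfo );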
// wrapper struct for struct VkMemoryAllocateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryAllocateInfo.html
struct MemoryAllocateInfo
{
using NativeType = VkMemoryAllocateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryAllocateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR MemoryAllocateInfo(VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ = {}, uint32_t memoryTypeIndex_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, allocationSize{ allocationSize_ }, memoryTypeIndex{ memoryTypeIndex_ }
{}
VULKAN_HPP_CONSTEXPR MemoryAllocateInfo( MemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MemoryAllocateInfo( VkMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: MemoryAllocateInfo( *reinterpret_cast<MemoryAllocateInfo const *>( &rhs ) )
{}
MemoryAllocateInfo & operator=( MemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
MemoryAllocateInfo & operator=( VkMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryAllocateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 MemoryAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MemoryAllocateInfo & setAllocationSize( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ ) VULKAN_HPP_NOEXCEPT
{
allocationSize = allocationSize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MemoryAllocateInfo & setMemoryTypeIndex( uint32_t memoryTypeIndex_ ) VULKAN_HPP_NOEXCEPT
{
memoryTypeIndex = memoryTypeIndex_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkMemoryAllocateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMemoryAllocateInfo*>( this );
}
operator VkMemoryAllocateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMemoryAllocateInfo*>( this );
}
operator VkMemoryAllocateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkMemoryAllocateInfo*>( this );
}
operator VkMemoryAllocateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkMemoryAllocateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, allocationSize, memoryTypeIndex );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( MemoryAllocateInfo const & ) const = default;
#else
bool operator==( MemoryAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( allocationSize == rhs.allocationSize )
&& ( memoryTypeIndex == rhs.memoryTypeIndex );
#endif
}
bool operator!=( MemoryAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryAllocateInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DeviceSize allocationSize = {};
uint32_t memoryTypeIndex = {};
};
template <>
struct CppType<StructureType, StructureType::eMemoryAllocateInfo>
{
using Type = MemoryAllocateInfo;
};
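// Illustrative usage sketch for MemoryAllocateInfo (assumes a vk::Device `device`, a
// vk::MemoryRequirements `requirements` queried elsewhere, and a `typeIndex` chosen from
// vk::PhysicalDeviceMemoryProperties):
//
//   vk::DeviceMemory memory = device.allocateMemory(
//       vk::MemoryAllocateInfo{}.setAllocationSize( requirements.size ).setMemoryTypeIndex( typeIndex ) );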
// wrapper struct for struct VkMemoryBarrier, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryBarrier.html
struct MemoryBarrier
{
using NativeType = VkMemoryBarrier;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryBarrier;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR MemoryBarrier(VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, srcAccessMask{ srcAccessMask_ }, dstAccessMask{ dstAccessMask_ }
{}
VULKAN_HPP_CONSTEXPR MemoryBarrier( MemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MemoryBarrier( VkMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
: MemoryBarrier( *reinterpret_cast<MemoryBarrier const *>( &rhs ) )
{}
MemoryBarrier & operator=( MemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
MemoryBarrier & operator=( VkMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryBarrier const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 MemoryBarrier & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
{
srcAccessMask = srcAccessMask_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
{
dstAccessMask = dstAccessMask_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkMemoryBarrier const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMemoryBarrier*>( this );
}
operator VkMemoryBarrier &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMemoryBarrier*>( this );
}
operator VkMemoryBarrier const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkMemoryBarrier*>( this );
}
operator VkMemoryBarrier *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkMemoryBarrier*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::AccessFlags const &, VULKAN_HPP_NAMESPACE::AccessFlags const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, srcAccessMask, dstAccessMask );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( MemoryBarrier const & ) const = default;
#else
bool operator==( MemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( srcAccessMask == rhs.srcAccessMask )
&& ( dstAccessMask == rhs.dstAccessMask );
#endif
}
bool operator!=( MemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryBarrier;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {};
VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
};
template <>
struct CppType<StructureType, StructureType::eMemoryBarrier>
{
using Type = MemoryBarrier;
};
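// Illustrative usage sketch for MemoryBarrier (assumes a vk::CommandBuffer `commandBuffer` in
// the recording state): make transfer writes visible to fragment-shader reads:
//
//   auto barrier = vk::MemoryBarrier{}
//                    .setSrcAccessMask( vk::AccessFlagBits::eTransferWrite )
//                    .setDstAccessMask( vk::AccessFlagBits::eShaderRead );
//   commandBuffer.pipelineBarrier( vk::PipelineStageFlagBits::eTransfer,
//                                  vk::PipelineStageFlagBits::eFragmentShader,
//                                  {}, barrier, {}, {} );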
// wrapper struct for struct VkMemoryBarrierAccessFlags3KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryBarrierAccessFlags3KHR.html
struct MemoryBarrierAccessFlags3KHR
{
using NativeType = VkMemoryBarrierAccessFlags3KHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryBarrierAccessFlags3KHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR MemoryBarrierAccessFlags3KHR(VULKAN_HPP_NAMESPACE::AccessFlags3KHR srcAccessMask3_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags3KHR dstAccessMask3_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, srcAccessMask3{ srcAccessMask3_ }, dstAccessMask3{ dstAccessMask3_ }
{}
VULKAN_HPP_CONSTEXPR MemoryBarrierAccessFlags3KHR( MemoryBarrierAccessFlags3KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MemoryBarrierAccessFlags3KHR( VkMemoryBarrierAccessFlags3KHR const & rhs ) VULKAN_HPP_NOEXCEPT
: MemoryBarrierAccessFlags3KHR( *reinterpret_cast<MemoryBarrierAccessFlags3KHR const *>( &rhs ) )
{}
MemoryBarrierAccessFlags3KHR & operator=( MemoryBarrierAccessFlags3KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
MemoryBarrierAccessFlags3KHR & operator=( VkMemoryBarrierAccessFlags3KHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryBarrierAccessFlags3KHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 MemoryBarrierAccessFlags3KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MemoryBarrierAccessFlags3KHR & setSrcAccessMask3( VULKAN_HPP_NAMESPACE::AccessFlags3KHR srcAccessMask3_ ) VULKAN_HPP_NOEXCEPT
{
srcAccessMask3 = srcAccessMask3_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MemoryBarrierAccessFlags3KHR & setDstAccessMask3( VULKAN_HPP_NAMESPACE::AccessFlags3KHR dstAccessMask3_ ) VULKAN_HPP_NOEXCEPT
{
dstAccessMask3 = dstAccessMask3_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkMemoryBarrierAccessFlags3KHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMemoryBarrierAccessFlags3KHR*>( this );
}
operator VkMemoryBarrierAccessFlags3KHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMemoryBarrierAccessFlags3KHR*>( this );
}
operator VkMemoryBarrierAccessFlags3KHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkMemoryBarrierAccessFlags3KHR*>( this );
}
operator VkMemoryBarrierAccessFlags3KHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkMemoryBarrierAccessFlags3KHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::AccessFlags3KHR const &, VULKAN_HPP_NAMESPACE::AccessFlags3KHR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, srcAccessMask3, dstAccessMask3 );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( MemoryBarrierAccessFlags3KHR const & ) const = default;
#else
bool operator==( MemoryBarrierAccessFlags3KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( srcAccessMask3 == rhs.srcAccessMask3 )
&& ( dstAccessMask3 == rhs.dstAccessMask3 );
#endif
}
bool operator!=( MemoryBarrierAccessFlags3KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryBarrierAccessFlags3KHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::AccessFlags3KHR srcAccessMask3 = {};
VULKAN_HPP_NAMESPACE::AccessFlags3KHR dstAccessMask3 = {};
};
template <>
struct CppType<StructureType, StructureType::eMemoryBarrierAccessFlags3KHR>
{
using Type = MemoryBarrierAccessFlags3KHR;
};
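// Illustrative sketch: the 64-bit access masks extend a vk::MemoryBarrier2 through its pNext
// chain (VK_KHR_maintenance8; the flag values below are assumptions):
//
//   auto access3  = vk::MemoryBarrierAccessFlags3KHR{};
//   auto barrier2 = vk::MemoryBarrier2{}.setPNext( &access3 );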
// wrapper struct for struct VkMemoryDedicatedAllocateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryDedicatedAllocateInfo.html
struct MemoryDedicatedAllocateInfo
{
using NativeType = VkMemoryDedicatedAllocateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryDedicatedAllocateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR MemoryDedicatedAllocateInfo(VULKAN_HPP_NAMESPACE::Image image_ = {}, VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, image{ image_ }, buffer{ buffer_ }
{}
VULKAN_HPP_CONSTEXPR MemoryDedicatedAllocateInfo( MemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MemoryDedicatedAllocateInfo( VkMemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: MemoryDedicatedAllocateInfo( *reinterpret_cast<MemoryDedicatedAllocateInfo const *>( &rhs ) )
{}
MemoryDedicatedAllocateInfo & operator=( MemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
MemoryDedicatedAllocateInfo & operator=( VkMemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 MemoryDedicatedAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MemoryDedicatedAllocateInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
{
image = image_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MemoryDedicatedAllocateInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
{
buffer = buffer_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkMemoryDedicatedAllocateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMemoryDedicatedAllocateInfo*>( this );
}
operator VkMemoryDedicatedAllocateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMemoryDedicatedAllocateInfo*>( this );
}
operator VkMemoryDedicatedAllocateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkMemoryDedicatedAllocateInfo*>( this );
}
operator VkMemoryDedicatedAllocateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkMemoryDedicatedAllocateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Image const &, VULKAN_HPP_NAMESPACE::Buffer const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, image, buffer );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( MemoryDedicatedAllocateInfo const & ) const = default;
#else
bool operator==( MemoryDedicatedAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( image == rhs.image )
&& ( buffer == rhs.buffer );
#endif
}
bool operator!=( MemoryDedicatedAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryDedicatedAllocateInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Image image = {};
VULKAN_HPP_NAMESPACE::Buffer buffer = {};
};
template <>
struct CppType<StructureType, StructureType::eMemoryDedicatedAllocateInfo>
{
using Type = MemoryDedicatedAllocateInfo;
};
using MemoryDedicatedAllocateInfoKHR = MemoryDedicatedAllocateInfo;
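// Usage sketch (illustrative, not generated from the registry; assumes a valid device, image, memoryTypeIndex and
// requirements in the surrounding code): a dedicated allocation is requested by chaining this structure into a
// MemoryAllocateInfo, setting exactly one of image or buffer.
//
//   VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfo dedicatedInfo;
//   dedicatedInfo.setImage( image );                                   // dedicate the allocation to this image
//   VULKAN_HPP_NAMESPACE::MemoryAllocateInfo allocateInfo;
//   allocateInfo.setAllocationSize( requirements.size )
//               .setMemoryTypeIndex( memoryTypeIndex )
//               .setPNext( &dedicatedInfo );
//   VULKAN_HPP_NAMESPACE::DeviceMemory memory = device.allocateMemory( allocateInfo );   // throws on failure in enhanced mode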
// wrapper struct for struct VkMemoryDedicatedAllocateInfoTensorARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryDedicatedAllocateInfoTensorARM.html
struct MemoryDedicatedAllocateInfoTensorARM
{
using NativeType = VkMemoryDedicatedAllocateInfoTensorARM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryDedicatedAllocateInfoTensorARM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR MemoryDedicatedAllocateInfoTensorARM(VULKAN_HPP_NAMESPACE::TensorARM tensor_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, tensor{ tensor_ }
{}
VULKAN_HPP_CONSTEXPR MemoryDedicatedAllocateInfoTensorARM( MemoryDedicatedAllocateInfoTensorARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MemoryDedicatedAllocateInfoTensorARM( VkMemoryDedicatedAllocateInfoTensorARM const & rhs ) VULKAN_HPP_NOEXCEPT
: MemoryDedicatedAllocateInfoTensorARM( *reinterpret_cast<MemoryDedicatedAllocateInfoTensorARM const *>( &rhs ) )
{}
MemoryDedicatedAllocateInfoTensorARM & operator=( MemoryDedicatedAllocateInfoTensorARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
MemoryDedicatedAllocateInfoTensorARM & operator=( VkMemoryDedicatedAllocateInfoTensorARM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfoTensorARM const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 MemoryDedicatedAllocateInfoTensorARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MemoryDedicatedAllocateInfoTensorARM & setTensor( VULKAN_HPP_NAMESPACE::TensorARM tensor_ ) VULKAN_HPP_NOEXCEPT
{
tensor = tensor_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkMemoryDedicatedAllocateInfoTensorARM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMemoryDedicatedAllocateInfoTensorARM*>( this );
}
operator VkMemoryDedicatedAllocateInfoTensorARM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMemoryDedicatedAllocateInfoTensorARM*>( this );
}
operator VkMemoryDedicatedAllocateInfoTensorARM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkMemoryDedicatedAllocateInfoTensorARM*>( this );
}
operator VkMemoryDedicatedAllocateInfoTensorARM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkMemoryDedicatedAllocateInfoTensorARM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::TensorARM const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, tensor );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( MemoryDedicatedAllocateInfoTensorARM const & ) const = default;
#else
bool operator==( MemoryDedicatedAllocateInfoTensorARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( tensor == rhs.tensor );
#endif
}
bool operator!=( MemoryDedicatedAllocateInfoTensorARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryDedicatedAllocateInfoTensorARM;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::TensorARM tensor = {};
};
template <>
struct CppType<StructureType, StructureType::eMemoryDedicatedAllocateInfoTensorARM>
{
using Type = MemoryDedicatedAllocateInfoTensorARM;
};
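// Usage sketch (illustrative; assumes the VK_ARM_tensors allocation pattern mirrors the image/buffer case, with a
// tensor handle created by the caller): chain the structure into the MemoryAllocateInfo used for the allocation.
//
//   VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfoTensorARM dedicatedTensorInfo;
//   dedicatedTensorInfo.setTensor( tensor );
//   allocateInfo.setPNext( &dedicatedTensorInfo );                     // allocateInfo as in the dedicated-image sketch above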
// wrapper struct for struct VkMemoryDedicatedRequirements, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryDedicatedRequirements.html
struct MemoryDedicatedRequirements
{
using NativeType = VkMemoryDedicatedRequirements;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryDedicatedRequirements;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR MemoryDedicatedRequirements(VULKAN_HPP_NAMESPACE::Bool32 prefersDedicatedAllocation_ = {}, VULKAN_HPP_NAMESPACE::Bool32 requiresDedicatedAllocation_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, prefersDedicatedAllocation{ prefersDedicatedAllocation_ }, requiresDedicatedAllocation{ requiresDedicatedAllocation_ }
{}
VULKAN_HPP_CONSTEXPR MemoryDedicatedRequirements( MemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MemoryDedicatedRequirements( VkMemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
: MemoryDedicatedRequirements( *reinterpret_cast<MemoryDedicatedRequirements const *>( &rhs ) )
{}
MemoryDedicatedRequirements & operator=( MemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
MemoryDedicatedRequirements & operator=( VkMemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements const *>( &rhs );
return *this;
}
operator VkMemoryDedicatedRequirements const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMemoryDedicatedRequirements*>( this );
}
operator VkMemoryDedicatedRequirements &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMemoryDedicatedRequirements*>( this );
}
operator VkMemoryDedicatedRequirements const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkMemoryDedicatedRequirements*>( this );
}
operator VkMemoryDedicatedRequirements *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkMemoryDedicatedRequirements*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, prefersDedicatedAllocation, requiresDedicatedAllocation );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( MemoryDedicatedRequirements const & ) const = default;
#else
bool operator==( MemoryDedicatedRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( prefersDedicatedAllocation == rhs.prefersDedicatedAllocation )
&& ( requiresDedicatedAllocation == rhs.requiresDedicatedAllocation );
#endif
}
bool operator!=( MemoryDedicatedRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryDedicatedRequirements;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 prefersDedicatedAllocation = {};
VULKAN_HPP_NAMESPACE::Bool32 requiresDedicatedAllocation = {};
};
template <>
struct CppType<StructureType, StructureType::eMemoryDedicatedRequirements>
{
using Type = MemoryDedicatedRequirements;
};
using MemoryDedicatedRequirementsKHR = MemoryDedicatedRequirements;
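// Usage sketch (illustrative, not generated from the registry): MemoryDedicatedRequirements is an output structure;
// it is filled in when chained behind MemoryRequirements2 in a requirements query. With the vulkan.hpp StructureChain
// helper and an image assumed to exist:
//
//   auto chain = device.getImageMemoryRequirements2< VULKAN_HPP_NAMESPACE::MemoryRequirements2,
//                                                    VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements >(
//     VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2( image ) );
//   VULKAN_HPP_NAMESPACE::Bool32 useDedicated =
//     chain.get<VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements>().prefersDedicatedAllocation;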
// wrapper struct for struct VkMemoryFdPropertiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryFdPropertiesKHR.html
struct MemoryFdPropertiesKHR
{
using NativeType = VkMemoryFdPropertiesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryFdPropertiesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR MemoryFdPropertiesKHR(uint32_t memoryTypeBits_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, memoryTypeBits{ memoryTypeBits_ }
{}
VULKAN_HPP_CONSTEXPR MemoryFdPropertiesKHR( MemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MemoryFdPropertiesKHR( VkMemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: MemoryFdPropertiesKHR( *reinterpret_cast<MemoryFdPropertiesKHR const *>( &rhs ) )
{}
MemoryFdPropertiesKHR & operator=( MemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
MemoryFdPropertiesKHR & operator=( VkMemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR const *>( &rhs );
return *this;
}
operator VkMemoryFdPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMemoryFdPropertiesKHR*>( this );
}
operator VkMemoryFdPropertiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMemoryFdPropertiesKHR*>( this );
}
operator VkMemoryFdPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkMemoryFdPropertiesKHR*>( this );
}
operator VkMemoryFdPropertiesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkMemoryFdPropertiesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, memoryTypeBits );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( MemoryFdPropertiesKHR const & ) const = default;
#else
bool operator==( MemoryFdPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( memoryTypeBits == rhs.memoryTypeBits );
#endif
}
bool operator!=( MemoryFdPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryFdPropertiesKHR;
void * pNext = {};
uint32_t memoryTypeBits = {};
};
template <>
struct CppType<StructureType, StructureType::eMemoryFdPropertiesKHR>
{
using Type = MemoryFdPropertiesKHR;
};
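// Usage sketch (illustrative, not generated from the registry): MemoryFdPropertiesKHR is the output of
// Device::getMemoryFdPropertiesKHR, reporting which memory types can import a given POSIX file descriptor.
// The fd is assumed to originate from an external exporter; eOpaqueFd is not a valid handle type for this query.
//
//   VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR fdProperties = device.getMemoryFdPropertiesKHR(
//     VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eDmaBufEXT, fd );
//   uint32_t importableTypes = fdProperties.memoryTypeBits;            // bitmask of usable memory type indices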
#if defined( VK_USE_PLATFORM_ANDROID_KHR )
// wrapper struct for struct VkMemoryGetAndroidHardwareBufferInfoANDROID, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryGetAndroidHardwareBufferInfoANDROID.html
struct MemoryGetAndroidHardwareBufferInfoANDROID
{
using NativeType = VkMemoryGetAndroidHardwareBufferInfoANDROID;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR MemoryGetAndroidHardwareBufferInfoANDROID(VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, memory{ memory_ }
{}
VULKAN_HPP_CONSTEXPR MemoryGetAndroidHardwareBufferInfoANDROID( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MemoryGetAndroidHardwareBufferInfoANDROID( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
: MemoryGetAndroidHardwareBufferInfoANDROID( *reinterpret_cast<MemoryGetAndroidHardwareBufferInfoANDROID const *>( &rhs ) )
{}
MemoryGetAndroidHardwareBufferInfoANDROID & operator=( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
MemoryGetAndroidHardwareBufferInfoANDROID & operator=( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 MemoryGetAndroidHardwareBufferInfoANDROID & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MemoryGetAndroidHardwareBufferInfoANDROID & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
{
memory = memory_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkMemoryGetAndroidHardwareBufferInfoANDROID const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID*>( this );
}
operator VkMemoryGetAndroidHardwareBufferInfoANDROID &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMemoryGetAndroidHardwareBufferInfoANDROID*>( this );
}
operator VkMemoryGetAndroidHardwareBufferInfoANDROID const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID*>( this );
}
operator VkMemoryGetAndroidHardwareBufferInfoANDROID *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkMemoryGetAndroidHardwareBufferInfoANDROID*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceMemory const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, memory );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( MemoryGetAndroidHardwareBufferInfoANDROID const & ) const = default;
#else
bool operator==( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( memory == rhs.memory );
#endif
}
bool operator!=( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
};
template <>
struct CppType<StructureType, StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID>
{
using Type = MemoryGetAndroidHardwareBufferInfoANDROID;
};
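// Usage sketch (illustrative, not generated from the registry): exporting a device allocation as an AHardwareBuffer
// via VK_ANDROID_external_memory_android_hardware_buffer; the memory is assumed to have been allocated with the
// matching exportable handle type.
//
//   VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID getInfo;
//   getInfo.setMemory( memory );
//   struct AHardwareBuffer * hardwareBuffer = device.getMemoryAndroidHardwareBufferANDROID( getInfo );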
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
// wrapper struct for struct VkMemoryGetFdInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryGetFdInfoKHR.html
struct MemoryGetFdInfoKHR
{
using NativeType = VkMemoryGetFdInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetFdInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR MemoryGetFdInfoKHR(VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, memory{ memory_ }, handleType{ handleType_ }
{}
VULKAN_HPP_CONSTEXPR MemoryGetFdInfoKHR( MemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MemoryGetFdInfoKHR( VkMemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: MemoryGetFdInfoKHR( *reinterpret_cast<MemoryGetFdInfoKHR const *>( &rhs ) )
{}
MemoryGetFdInfoKHR & operator=( MemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
MemoryGetFdInfoKHR & operator=( VkMemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 MemoryGetFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MemoryGetFdInfoKHR & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
{
memory = memory_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MemoryGetFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkMemoryGetFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMemoryGetFdInfoKHR*>( this );
}
operator VkMemoryGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMemoryGetFdInfoKHR*>( this );
}
operator VkMemoryGetFdInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkMemoryGetFdInfoKHR*>( this );
}
operator VkMemoryGetFdInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkMemoryGetFdInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceMemory const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, memory, handleType );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( MemoryGetFdInfoKHR const & ) const = default;
#else
bool operator==( MemoryGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( memory == rhs.memory )
&& ( handleType == rhs.handleType );
#endif
}
bool operator!=( MemoryGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetFdInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
};
template <>
struct CppType<StructureType, StructureType::eMemoryGetFdInfoKHR>
{
using Type = MemoryGetFdInfoKHR;
};
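// Usage sketch (illustrative, not generated from the registry): exporting device memory as a POSIX file descriptor
// with VK_KHR_external_memory_fd. The allocation is assumed to have been created with a matching exportable handle
// type; for eOpaqueFd, ownership of the returned fd passes to the caller.
//
//   VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR getFdInfo;
//   getFdInfo.setMemory( memory )
//            .setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );
//   int fd = device.getMemoryFdKHR( getFdInfo );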
#if defined( VK_USE_PLATFORM_METAL_EXT )
// wrapper struct for struct VkMemoryGetMetalHandleInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryGetMetalHandleInfoEXT.html
struct MemoryGetMetalHandleInfoEXT
{
using NativeType = VkMemoryGetMetalHandleInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetMetalHandleInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR MemoryGetMetalHandleInfoEXT(VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, memory{ memory_ }, handleType{ handleType_ }
{}
VULKAN_HPP_CONSTEXPR MemoryGetMetalHandleInfoEXT( MemoryGetMetalHandleInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MemoryGetMetalHandleInfoEXT( VkMemoryGetMetalHandleInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: MemoryGetMetalHandleInfoEXT( *reinterpret_cast<MemoryGetMetalHandleInfoEXT const *>( &rhs ) )
{}
MemoryGetMetalHandleInfoEXT & operator=( MemoryGetMetalHandleInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
MemoryGetMetalHandleInfoEXT & operator=( VkMemoryGetMetalHandleInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryGetMetalHandleInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 MemoryGetMetalHandleInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MemoryGetMetalHandleInfoEXT & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
{
memory = memory_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MemoryGetMetalHandleInfoEXT & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkMemoryGetMetalHandleInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMemoryGetMetalHandleInfoEXT*>( this );
}
operator VkMemoryGetMetalHandleInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMemoryGetMetalHandleInfoEXT*>( this );
}
operator VkMemoryGetMetalHandleInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkMemoryGetMetalHandleInfoEXT*>( this );
}
operator VkMemoryGetMetalHandleInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkMemoryGetMetalHandleInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceMemory const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, memory, handleType );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( MemoryGetMetalHandleInfoEXT const & ) const = default;
#else
bool operator==( MemoryGetMetalHandleInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( memory == rhs.memory )
&& ( handleType == rhs.handleType );
#endif
}
bool operator!=( MemoryGetMetalHandleInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetMetalHandleInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
};
template <>
struct CppType<StructureType, StructureType::eMemoryGetMetalHandleInfoEXT>
{
using Type = MemoryGetMetalHandleInfoEXT;
};
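// Usage sketch (illustrative; assumes the VK_EXT_external_memory_metal entry point Device::getMemoryMetalHandleEXT
// and a Metal handle type enumerant `metalHandleType` chosen by the caller). Note that the generated default
// handleType is eOpaqueFd, so a Metal handle type must be set explicitly before the call.
//
//   VULKAN_HPP_NAMESPACE::MemoryGetMetalHandleInfoEXT getMetalHandleInfo;
//   getMetalHandleInfo.setMemory( memory )
//                     .setHandleType( metalHandleType );               // e.g. the MTLBuffer handle type of the extension
//   void * metalHandle = device.getMemoryMetalHandleEXT( getMetalHandleInfo );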
#endif /*VK_USE_PLATFORM_METAL_EXT*/
// wrapper struct for struct VkMemoryGetRemoteAddressInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryGetRemoteAddressInfoNV.html
struct MemoryGetRemoteAddressInfoNV
{
using NativeType = VkMemoryGetRemoteAddressInfoNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetRemoteAddressInfoNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR MemoryGetRemoteAddressInfoNV(VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, memory{ memory_ }, handleType{ handleType_ }
{}
VULKAN_HPP_CONSTEXPR MemoryGetRemoteAddressInfoNV( MemoryGetRemoteAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MemoryGetRemoteAddressInfoNV( VkMemoryGetRemoteAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: MemoryGetRemoteAddressInfoNV( *reinterpret_cast<MemoryGetRemoteAddressInfoNV const *>( &rhs ) )
{}
MemoryGetRemoteAddressInfoNV & operator=( MemoryGetRemoteAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
MemoryGetRemoteAddressInfoNV & operator=( VkMemoryGetRemoteAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 MemoryGetRemoteAddressInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MemoryGetRemoteAddressInfoNV & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
{
memory = memory_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MemoryGetRemoteAddressInfoNV & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkMemoryGetRemoteAddressInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMemoryGetRemoteAddressInfoNV*>( this );
}
operator VkMemoryGetRemoteAddressInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMemoryGetRemoteAddressInfoNV*>( this );
}
operator VkMemoryGetRemoteAddressInfoNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkMemoryGetRemoteAddressInfoNV*>( this );
}
operator VkMemoryGetRemoteAddressInfoNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkMemoryGetRemoteAddressInfoNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceMemory const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, memory, handleType );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( MemoryGetRemoteAddressInfoNV const & ) const = default;
#else
bool operator==( MemoryGetRemoteAddressInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( memory == rhs.memory )
&& ( handleType == rhs.handleType );
#endif
}
bool operator!=( MemoryGetRemoteAddressInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetRemoteAddressInfoNV;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
};
template <>
struct CppType<StructureType, StructureType::eMemoryGetRemoteAddressInfoNV>
{
using Type = MemoryGetRemoteAddressInfoNV;
};
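// Usage sketch (illustrative, not generated from the registry): VK_NV_external_memory_rdma exposes an allocation to
// remote devices; the allocation is assumed to have been created with the eRdmaAddressNV handle type, which must
// also replace the generated eOpaqueFd default here.
//
//   VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV getRemoteAddressInfo;
//   getRemoteAddressInfo.setMemory( memory )
//                       .setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eRdmaAddressNV );
//   VULKAN_HPP_NAMESPACE::RemoteAddressNV remoteAddress = device.getMemoryRemoteAddressNV( getRemoteAddressInfo );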
#if defined( VK_USE_PLATFORM_WIN32_KHR )
// wrapper struct for struct VkMemoryGetWin32HandleInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryGetWin32HandleInfoKHR.html
struct MemoryGetWin32HandleInfoKHR
{
using NativeType = VkMemoryGetWin32HandleInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetWin32HandleInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR MemoryGetWin32HandleInfoKHR(VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, memory{ memory_ }, handleType{ handleType_ }
{}
VULKAN_HPP_CONSTEXPR MemoryGetWin32HandleInfoKHR( MemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MemoryGetWin32HandleInfoKHR( VkMemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: MemoryGetWin32HandleInfoKHR( *reinterpret_cast<MemoryGetWin32HandleInfoKHR const *>( &rhs ) )
{}
MemoryGetWin32HandleInfoKHR & operator=( MemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
MemoryGetWin32HandleInfoKHR & operator=( VkMemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 MemoryGetWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MemoryGetWin32HandleInfoKHR & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
{
memory = memory_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MemoryGetWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkMemoryGetWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR*>( this );
}
operator VkMemoryGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMemoryGetWin32HandleInfoKHR*>( this );
}
operator VkMemoryGetWin32HandleInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR*>( this );
}
operator VkMemoryGetWin32HandleInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkMemoryGetWin32HandleInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceMemory const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, memory, handleType );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( MemoryGetWin32HandleInfoKHR const & ) const = default;
#else
bool operator==( MemoryGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( memory == rhs.memory )
&& ( handleType == rhs.handleType );
#endif
}
bool operator!=( MemoryGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetWin32HandleInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
};
template <>
struct CppType<StructureType, StructureType::eMemoryGetWin32HandleInfoKHR>
{
using Type = MemoryGetWin32HandleInfoKHR;
};
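// Usage sketch (illustrative, not generated from the registry): exporting device memory as a Win32 handle with
// VK_KHR_external_memory_win32. Note that the generated default handleType is eOpaqueFd, so a Win32 handle type
// must be set explicitly; for eOpaqueWin32 the returned HANDLE is owned by the caller.
//
//   VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR getWin32HandleInfo;
//   getWin32HandleInfo.setMemory( memory )
//                     .setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueWin32 );
//   HANDLE win32Handle = device.getMemoryWin32HandleKHR( getWin32HandleInfo );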
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#if defined( VK_USE_PLATFORM_FUCHSIA )
// wrapper struct for struct VkMemoryGetZirconHandleInfoFUCHSIA, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryGetZirconHandleInfoFUCHSIA.html
struct MemoryGetZirconHandleInfoFUCHSIA
{
using NativeType = VkMemoryGetZirconHandleInfoFUCHSIA;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetZirconHandleInfoFUCHSIA;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR MemoryGetZirconHandleInfoFUCHSIA(VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, memory{ memory_ }, handleType{ handleType_ }
{}
VULKAN_HPP_CONSTEXPR MemoryGetZirconHandleInfoFUCHSIA( MemoryGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MemoryGetZirconHandleInfoFUCHSIA( VkMemoryGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
: MemoryGetZirconHandleInfoFUCHSIA( *reinterpret_cast<MemoryGetZirconHandleInfoFUCHSIA const *>( &rhs ) )
{}
MemoryGetZirconHandleInfoFUCHSIA & operator=( MemoryGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
MemoryGetZirconHandleInfoFUCHSIA & operator=( VkMemoryGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 MemoryGetZirconHandleInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MemoryGetZirconHandleInfoFUCHSIA & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
{
memory = memory_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MemoryGetZirconHandleInfoFUCHSIA & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkMemoryGetZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMemoryGetZirconHandleInfoFUCHSIA*>( this );
}
operator VkMemoryGetZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMemoryGetZirconHandleInfoFUCHSIA*>( this );
}
operator VkMemoryGetZirconHandleInfoFUCHSIA const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkMemoryGetZirconHandleInfoFUCHSIA*>( this );
}
operator VkMemoryGetZirconHandleInfoFUCHSIA *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkMemoryGetZirconHandleInfoFUCHSIA*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceMemory const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, memory, handleType );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( MemoryGetZirconHandleInfoFUCHSIA const & ) const = default;
#else
bool operator==( MemoryGetZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( memory == rhs.memory )
&& ( handleType == rhs.handleType );
#endif
}
bool operator!=( MemoryGetZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetZirconHandleInfoFUCHSIA;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
};
template <>
struct CppType<StructureType, StructureType::eMemoryGetZirconHandleInfoFUCHSIA>
{
using Type = MemoryGetZirconHandleInfoFUCHSIA;
};
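// Usage sketch (illustrative, not generated from the registry): exporting device memory as a Zircon VMO handle on
// Fuchsia via VK_FUCHSIA_external_memory; as above, the generated eOpaqueFd default must be overridden with the
// Zircon handle type before the call.
//
//   VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA getZirconHandleInfo;
//   getZirconHandleInfo.setMemory( memory )
//                      .setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eZirconVmoFUCHSIA );
//   zx_handle_t zirconHandle = device.getMemoryZirconHandleFUCHSIA( getZirconHandleInfo );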
#endif /*VK_USE_PLATFORM_FUCHSIA*/
// wrapper struct for struct VkMemoryHeap, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryHeap.html
struct MemoryHeap
{
using NativeType = VkMemoryHeap;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR MemoryHeap(VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::MemoryHeapFlags flags_ = {}) VULKAN_HPP_NOEXCEPT
: size{ size_ }, flags{ flags_ }
{}
VULKAN_HPP_CONSTEXPR MemoryHeap( MemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MemoryHeap( VkMemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT
: MemoryHeap( *reinterpret_cast<MemoryHeap const *>( &rhs ) )
{}
MemoryHeap & operator=( MemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
MemoryHeap & operator=( VkMemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryHeap const *>( &rhs );
return *this;
}
operator VkMemoryHeap const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMemoryHeap*>( this );
}
operator VkMemoryHeap &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMemoryHeap*>( this );
}
operator VkMemoryHeap const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkMemoryHeap*>( this );
}
operator VkMemoryHeap *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkMemoryHeap*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::MemoryHeapFlags const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( size, flags );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( MemoryHeap const & ) const = default;
#else
bool operator==( MemoryHeap const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( size == rhs.size )
&& ( flags == rhs.flags );
#endif
}
bool operator!=( MemoryHeap const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::DeviceSize size = {};
VULKAN_HPP_NAMESPACE::MemoryHeapFlags flags = {};
};
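// Usage sketch (illustrative, not generated from the registry): MemoryHeap entries are not queried directly; they
// are reported inside PhysicalDeviceMemoryProperties. A valid physicalDevice is assumed.
//
//   VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties = physicalDevice.getMemoryProperties();
//   for ( uint32_t i = 0; i < memoryProperties.memoryHeapCount; ++i )
//   {
//     VULKAN_HPP_NAMESPACE::MemoryHeap const & heap = memoryProperties.memoryHeaps[i];
//     if ( heap.flags & VULKAN_HPP_NAMESPACE::MemoryHeapFlagBits::eDeviceLocal )
//     {
//       // heap.size bytes of device-local memory in this heap
//     }
//   }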
// wrapper struct for struct VkMemoryHostPointerPropertiesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryHostPointerPropertiesEXT.html
struct MemoryHostPointerPropertiesEXT
{
using NativeType = VkMemoryHostPointerPropertiesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryHostPointerPropertiesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR MemoryHostPointerPropertiesEXT(uint32_t memoryTypeBits_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, memoryTypeBits{ memoryTypeBits_ }
{}
VULKAN_HPP_CONSTEXPR MemoryHostPointerPropertiesEXT( MemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MemoryHostPointerPropertiesEXT( VkMemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: MemoryHostPointerPropertiesEXT( *reinterpret_cast<MemoryHostPointerPropertiesEXT const *>( &rhs ) )
{}
MemoryHostPointerPropertiesEXT & operator=( MemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
MemoryHostPointerPropertiesEXT & operator=( VkMemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT const *>( &rhs );
return *this;
}
operator VkMemoryHostPointerPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMemoryHostPointerPropertiesEXT*>( this );
}
operator VkMemoryHostPointerPropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMemoryHostPointerPropertiesEXT*>( this );
}
operator VkMemoryHostPointerPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkMemoryHostPointerPropertiesEXT*>( this );
}
operator VkMemoryHostPointerPropertiesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkMemoryHostPointerPropertiesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, memoryTypeBits );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( MemoryHostPointerPropertiesEXT const & ) const = default;
#else
bool operator==( MemoryHostPointerPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( memoryTypeBits == rhs.memoryTypeBits );
#endif
}
bool operator!=( MemoryHostPointerPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryHostPointerPropertiesEXT;
void * pNext = {};
uint32_t memoryTypeBits = {};
};
template <>
struct CppType<StructureType, StructureType::eMemoryHostPointerPropertiesEXT>
{
using Type = MemoryHostPointerPropertiesEXT;
};
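// Usage sketch (illustrative, not generated from the registry): output of Device::getMemoryHostPointerPropertiesEXT
// (VK_EXT_external_memory_host), reporting the memory types that can import a host pointer. hostPointer is assumed
// to satisfy the implementation's minImportedHostPointerAlignment.
//
//   VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT hostPointerProperties =
//     device.getMemoryHostPointerPropertiesEXT(
//       VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT, hostPointer );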
// wrapper struct for struct VkMemoryMapInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryMapInfo.html
struct MemoryMapInfo
{
using NativeType = VkMemoryMapInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryMapInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR MemoryMapInfo(VULKAN_HPP_NAMESPACE::MemoryMapFlags flags_ = {}, VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, memory{ memory_ }, offset{ offset_ }, size{ size_ }
{}
VULKAN_HPP_CONSTEXPR MemoryMapInfo( MemoryMapInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MemoryMapInfo( VkMemoryMapInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: MemoryMapInfo( *reinterpret_cast<MemoryMapInfo const *>( &rhs ) )
{}
MemoryMapInfo & operator=( MemoryMapInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
MemoryMapInfo & operator=( VkMemoryMapInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryMapInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 MemoryMapInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MemoryMapInfo & setFlags( VULKAN_HPP_NAMESPACE::MemoryMapFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MemoryMapInfo & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
{
memory = memory_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MemoryMapInfo & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
{
offset = offset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MemoryMapInfo & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
{
size = size_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkMemoryMapInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMemoryMapInfo*>( this );
}
operator VkMemoryMapInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMemoryMapInfo*>( this );
}
operator VkMemoryMapInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkMemoryMapInfo*>( this );
}
operator VkMemoryMapInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkMemoryMapInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::MemoryMapFlags const &, VULKAN_HPP_NAMESPACE::DeviceMemory const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, memory, offset, size );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( MemoryMapInfo const & ) const = default;
#else
bool operator==( MemoryMapInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( memory == rhs.memory )
&& ( offset == rhs.offset )
&& ( size == rhs.size );
#endif
}
bool operator!=( MemoryMapInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryMapInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::MemoryMapFlags flags = {};
VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
VULKAN_HPP_NAMESPACE::DeviceSize size = {};
};
template <>
struct CppType<StructureType, StructureType::eMemoryMapInfo>
{
using Type = MemoryMapInfo;
};
using MemoryMapInfoKHR = MemoryMapInfo;
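// Usage sketch (illustrative; assumes the vkMapMemory2 entry point, core in Vulkan 1.4 and previously
// VK_KHR_map_memory2, and a host-visible allocation):
//
//   VULKAN_HPP_NAMESPACE::MemoryMapInfo mapInfo;
//   mapInfo.setMemory( memory )
//          .setOffset( 0 )
//          .setSize( VK_WHOLE_SIZE );                                  // map the entire allocation
//   void * data = device.mapMemory2( mapInfo );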
// wrapper struct for struct VkMemoryMapPlacedInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryMapPlacedInfoEXT.html
struct MemoryMapPlacedInfoEXT
{
using NativeType = VkMemoryMapPlacedInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryMapPlacedInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR MemoryMapPlacedInfoEXT(void * pPlacedAddress_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pPlacedAddress{ pPlacedAddress_ }
{}
VULKAN_HPP_CONSTEXPR MemoryMapPlacedInfoEXT( MemoryMapPlacedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MemoryMapPlacedInfoEXT( VkMemoryMapPlacedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: MemoryMapPlacedInfoEXT( *reinterpret_cast<MemoryMapPlacedInfoEXT const *>( &rhs ) )
{}
MemoryMapPlacedInfoEXT & operator=( MemoryMapPlacedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
MemoryMapPlacedInfoEXT & operator=( VkMemoryMapPlacedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryMapPlacedInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 MemoryMapPlacedInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MemoryMapPlacedInfoEXT & setPPlacedAddress( void * pPlacedAddress_ ) VULKAN_HPP_NOEXCEPT
{
pPlacedAddress = pPlacedAddress_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkMemoryMapPlacedInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMemoryMapPlacedInfoEXT*>( this );
}
operator VkMemoryMapPlacedInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMemoryMapPlacedInfoEXT*>( this );
}
operator VkMemoryMapPlacedInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkMemoryMapPlacedInfoEXT*>( this );
}
operator VkMemoryMapPlacedInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkMemoryMapPlacedInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, void * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pPlacedAddress );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( MemoryMapPlacedInfoEXT const & ) const = default;
#else
bool operator==( MemoryMapPlacedInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pPlacedAddress == rhs.pPlacedAddress );
#endif
}
bool operator!=( MemoryMapPlacedInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryMapPlacedInfoEXT;
const void * pNext = {};
void * pPlacedAddress = {};
};
template <>
struct CppType<StructureType, StructureType::eMemoryMapPlacedInfoEXT>
{
using Type = MemoryMapPlacedInfoEXT;
};
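// Usage sketch (illustrative, not generated from the registry): VK_EXT_map_memory_placed maps memory at a
// caller-chosen virtual address; the structure chains into a MemoryMapInfo whose flags include ePlacedEXT.
// placedAddress is assumed to be suitably aligned and reserved by the caller.
//
//   VULKAN_HPP_NAMESPACE::MemoryMapPlacedInfoEXT placedInfo;
//   placedInfo.setPPlacedAddress( placedAddress );
//   mapInfo.setFlags( VULKAN_HPP_NAMESPACE::MemoryMapFlagBits::ePlacedEXT )
//          .setPNext( &placedInfo );                                   // mapInfo as in the MemoryMapInfo sketch above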
#if defined( VK_USE_PLATFORM_METAL_EXT )
// wrapper struct for struct VkMemoryMetalHandlePropertiesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryMetalHandlePropertiesEXT.html
struct MemoryMetalHandlePropertiesEXT
{
using NativeType = VkMemoryMetalHandlePropertiesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryMetalHandlePropertiesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR MemoryMetalHandlePropertiesEXT(uint32_t memoryTypeBits_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, memoryTypeBits{ memoryTypeBits_ }
{}
VULKAN_HPP_CONSTEXPR MemoryMetalHandlePropertiesEXT( MemoryMetalHandlePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MemoryMetalHandlePropertiesEXT( VkMemoryMetalHandlePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: MemoryMetalHandlePropertiesEXT( *reinterpret_cast<MemoryMetalHandlePropertiesEXT const *>( &rhs ) )
{}
MemoryMetalHandlePropertiesEXT & operator=( MemoryMetalHandlePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
MemoryMetalHandlePropertiesEXT & operator=( VkMemoryMetalHandlePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryMetalHandlePropertiesEXT const *>( &rhs );
return *this;
}
operator VkMemoryMetalHandlePropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMemoryMetalHandlePropertiesEXT*>( this );
}
operator VkMemoryMetalHandlePropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMemoryMetalHandlePropertiesEXT*>( this );
}
operator VkMemoryMetalHandlePropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkMemoryMetalHandlePropertiesEXT*>( this );
}
operator VkMemoryMetalHandlePropertiesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkMemoryMetalHandlePropertiesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, memoryTypeBits );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( MemoryMetalHandlePropertiesEXT const & ) const = default;
#else
bool operator==( MemoryMetalHandlePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( memoryTypeBits == rhs.memoryTypeBits );
#endif
}
bool operator!=( MemoryMetalHandlePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryMetalHandlePropertiesEXT;
void * pNext = {};
uint32_t memoryTypeBits = {};
};
template <>
struct CppType<StructureType, StructureType::eMemoryMetalHandlePropertiesEXT>
{
using Type = MemoryMetalHandlePropertiesEXT;
};
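// Usage sketch (illustrative; assumes the VK_EXT_external_memory_metal query
// Device::getMemoryMetalHandlePropertiesEXT, with handle type and handle taken from the exporter): reports the
// memory types able to import an existing Metal resource handle.
//
//   VULKAN_HPP_NAMESPACE::MemoryMetalHandlePropertiesEXT metalHandleProperties =
//     device.getMemoryMetalHandlePropertiesEXT( metalHandleType, metalHandle );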
#endif /*VK_USE_PLATFORM_METAL_EXT*/
// wrapper struct for struct VkMemoryOpaqueCaptureAddressAllocateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryOpaqueCaptureAddressAllocateInfo.html
struct MemoryOpaqueCaptureAddressAllocateInfo
{
using NativeType = VkMemoryOpaqueCaptureAddressAllocateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryOpaqueCaptureAddressAllocateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR MemoryOpaqueCaptureAddressAllocateInfo(uint64_t opaqueCaptureAddress_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, opaqueCaptureAddress{ opaqueCaptureAddress_ }
{}
VULKAN_HPP_CONSTEXPR MemoryOpaqueCaptureAddressAllocateInfo( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MemoryOpaqueCaptureAddressAllocateInfo( VkMemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: MemoryOpaqueCaptureAddressAllocateInfo( *reinterpret_cast<MemoryOpaqueCaptureAddressAllocateInfo const *>( &rhs ) )
{}
MemoryOpaqueCaptureAddressAllocateInfo & operator=( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
MemoryOpaqueCaptureAddressAllocateInfo & operator=( VkMemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 MemoryOpaqueCaptureAddressAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MemoryOpaqueCaptureAddressAllocateInfo & setOpaqueCaptureAddress( uint64_t opaqueCaptureAddress_ ) VULKAN_HPP_NOEXCEPT
{
opaqueCaptureAddress = opaqueCaptureAddress_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkMemoryOpaqueCaptureAddressAllocateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMemoryOpaqueCaptureAddressAllocateInfo*>( this );
}
operator VkMemoryOpaqueCaptureAddressAllocateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMemoryOpaqueCaptureAddressAllocateInfo*>( this );
}
operator VkMemoryOpaqueCaptureAddressAllocateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkMemoryOpaqueCaptureAddressAllocateInfo*>( this );
}
operator VkMemoryOpaqueCaptureAddressAllocateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkMemoryOpaqueCaptureAddressAllocateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint64_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, opaqueCaptureAddress );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( MemoryOpaqueCaptureAddressAllocateInfo const & ) const = default;
#else
bool operator==( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( opaqueCaptureAddress == rhs.opaqueCaptureAddress );
#endif
}
bool operator!=( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryOpaqueCaptureAddressAllocateInfo;
const void * pNext = {};
uint64_t opaqueCaptureAddress = {};
};
template <>
struct CppType<StructureType, StructureType::eMemoryOpaqueCaptureAddressAllocateInfo>
{
using Type = MemoryOpaqueCaptureAddressAllocateInfo;
};
using MemoryOpaqueCaptureAddressAllocateInfoKHR = MemoryOpaqueCaptureAddressAllocateInfo;
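// Usage sketch (illustrative, not generated from the registry): for capture/replay of buffer device addresses, an
// address recorded at capture time (savedOpaqueCaptureAddress, assumed) is replayed by chaining this structure into
// the allocation together with the eDeviceAddressCaptureReplay allocate flag.
//
//   VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfo captureAddressInfo;
//   captureAddressInfo.setOpaqueCaptureAddress( savedOpaqueCaptureAddress );
//   VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfo allocateFlagsInfo;
//   allocateFlagsInfo.setFlags( VULKAN_HPP_NAMESPACE::MemoryAllocateFlagBits::eDeviceAddress
//                               | VULKAN_HPP_NAMESPACE::MemoryAllocateFlagBits::eDeviceAddressCaptureReplay )
//                    .setPNext( &captureAddressInfo );
//   allocateInfo.setPNext( &allocateFlagsInfo );                       // allocateInfo as in earlier sketches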
// wrapper struct for struct VkMemoryPriorityAllocateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryPriorityAllocateInfoEXT.html
struct MemoryPriorityAllocateInfoEXT
{
using NativeType = VkMemoryPriorityAllocateInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryPriorityAllocateInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR MemoryPriorityAllocateInfoEXT(float priority_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, priority{ priority_ }
{}
VULKAN_HPP_CONSTEXPR MemoryPriorityAllocateInfoEXT( MemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MemoryPriorityAllocateInfoEXT( VkMemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: MemoryPriorityAllocateInfoEXT( *reinterpret_cast<MemoryPriorityAllocateInfoEXT const *>( &rhs ) )
{}
MemoryPriorityAllocateInfoEXT & operator=( MemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
MemoryPriorityAllocateInfoEXT & operator=( VkMemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryPriorityAllocateInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 MemoryPriorityAllocateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MemoryPriorityAllocateInfoEXT & setPriority( float priority_ ) VULKAN_HPP_NOEXCEPT
{
priority = priority_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkMemoryPriorityAllocateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMemoryPriorityAllocateInfoEXT*>( this );
}
operator VkMemoryPriorityAllocateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMemoryPriorityAllocateInfoEXT*>( this );
}
operator VkMemoryPriorityAllocateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkMemoryPriorityAllocateInfoEXT*>( this );
}
operator VkMemoryPriorityAllocateInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkMemoryPriorityAllocateInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, float const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, priority );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( MemoryPriorityAllocateInfoEXT const & ) const = default;
#else
bool operator==( MemoryPriorityAllocateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( priority == rhs.priority );
#endif
}
bool operator!=( MemoryPriorityAllocateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryPriorityAllocateInfoEXT;
const void * pNext = {};
float priority = {};
};
template <>
struct CppType<StructureType, StructureType::eMemoryPriorityAllocateInfoEXT>
{
using Type = MemoryPriorityAllocateInfoEXT;
};
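// Hedged usage sketch (illustrative comment only): chaining a priority hint into a
// normal allocation. Assumes the default `vk` namespace alias, a vk::Device `device`
// created with VK_EXT_memory_priority enabled, and placeholder `allocationSize` /
// `memoryTypeIndex` values.
//   vk::MemoryPriorityAllocateInfoEXT priorityInfo( 0.75f );  // 0.0 = lowest, 1.0 = highest
//   vk::MemoryAllocateInfo allocInfo( allocationSize, memoryTypeIndex, &priorityInfo );
//   vk::DeviceMemory memory = device.allocateMemory( allocInfo );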
// wrapper struct for struct VkMemoryRequirements, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryRequirements.html
struct MemoryRequirements
{
using NativeType = VkMemoryRequirements;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR MemoryRequirements(VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize alignment_ = {}, uint32_t memoryTypeBits_ = {}) VULKAN_HPP_NOEXCEPT
: size{ size_ }, alignment{ alignment_ }, memoryTypeBits{ memoryTypeBits_ }
{}
VULKAN_HPP_CONSTEXPR MemoryRequirements( MemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MemoryRequirements( VkMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
: MemoryRequirements( *reinterpret_cast<MemoryRequirements const *>( &rhs ) )
{}
MemoryRequirements & operator=( MemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
MemoryRequirements & operator=( VkMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryRequirements const *>( &rhs );
return *this;
}
operator VkMemoryRequirements const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMemoryRequirements*>( this );
}
operator VkMemoryRequirements &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMemoryRequirements*>( this );
}
operator VkMemoryRequirements const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkMemoryRequirements*>( this );
}
operator VkMemoryRequirements *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkMemoryRequirements*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( size, alignment, memoryTypeBits );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( MemoryRequirements const & ) const = default;
#else
bool operator==( MemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( size == rhs.size )
&& ( alignment == rhs.alignment )
&& ( memoryTypeBits == rhs.memoryTypeBits );
#endif
}
bool operator!=( MemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::DeviceSize size = {};
VULKAN_HPP_NAMESPACE::DeviceSize alignment = {};
uint32_t memoryTypeBits = {};
};
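// Hedged usage sketch (illustrative comment only): querying the requirements of a
// buffer and rounding an offset up to the reported alignment. Assumes the default
// `vk` namespace alias and valid `device` / `buffer` handles; the spec guarantees
// `alignment` is a power of two, which the mask trick below relies on.
//   vk::MemoryRequirements reqs = device.getBufferMemoryRequirements( buffer );
//   vk::DeviceSize alignedOffset = ( offset + reqs.alignment - 1 ) & ~( reqs.alignment - 1 );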
// wrapper struct for struct VkMemoryRequirements2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryRequirements2.html
struct MemoryRequirements2
{
using NativeType = VkMemoryRequirements2;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryRequirements2;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR MemoryRequirements2(VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, memoryRequirements{ memoryRequirements_ }
{}
VULKAN_HPP_CONSTEXPR MemoryRequirements2( MemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MemoryRequirements2( VkMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
: MemoryRequirements2( *reinterpret_cast<MemoryRequirements2 const *>( &rhs ) )
{}
MemoryRequirements2 & operator=( MemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
MemoryRequirements2 & operator=( VkMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryRequirements2 const *>( &rhs );
return *this;
}
operator VkMemoryRequirements2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMemoryRequirements2*>( this );
}
operator VkMemoryRequirements2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMemoryRequirements2*>( this );
}
operator VkMemoryRequirements2 const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkMemoryRequirements2*>( this );
}
operator VkMemoryRequirements2 *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkMemoryRequirements2*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::MemoryRequirements const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, memoryRequirements );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( MemoryRequirements2 const & ) const = default;
#else
bool operator==( MemoryRequirements2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( memoryRequirements == rhs.memoryRequirements );
#endif
}
bool operator!=( MemoryRequirements2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryRequirements2;
void * pNext = {};
VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements = {};
};
template <>
struct CppType<StructureType, StructureType::eMemoryRequirements2>
{
using Type = MemoryRequirements2;
};
using MemoryRequirements2KHR = MemoryRequirements2;
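// Hedged usage sketch (illustrative comment only): the extensible variant of the
// query above, pulling vk::MemoryDedicatedRequirements out of the same call via a
// structure chain. Assumes the default `vk` namespace alias and valid `device` /
// `buffer` handles.
//   auto chain = device.getBufferMemoryRequirements2<vk::MemoryRequirements2,
//                                                    vk::MemoryDedicatedRequirements>(
//     vk::BufferMemoryRequirementsInfo2( buffer ) );
//   vk::MemoryRequirements reqs = chain.get<vk::MemoryRequirements2>().memoryRequirements;
//   bool preferDedicated = chain.get<vk::MemoryDedicatedRequirements>().prefersDedicatedAllocation;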
// wrapper struct for struct VkMemoryType, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryType.html
struct MemoryType
{
using NativeType = VkMemoryType;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR MemoryType(VULKAN_HPP_NAMESPACE::MemoryPropertyFlags propertyFlags_ = {}, uint32_t heapIndex_ = {}) VULKAN_HPP_NOEXCEPT
: propertyFlags{ propertyFlags_ }, heapIndex{ heapIndex_ }
{}
VULKAN_HPP_CONSTEXPR MemoryType( MemoryType const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MemoryType( VkMemoryType const & rhs ) VULKAN_HPP_NOEXCEPT
: MemoryType( *reinterpret_cast<MemoryType const *>( &rhs ) )
{}
MemoryType & operator=( MemoryType const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
MemoryType & operator=( VkMemoryType const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryType const *>( &rhs );
return *this;
}
operator VkMemoryType const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMemoryType*>( this );
}
operator VkMemoryType &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMemoryType*>( this );
}
operator VkMemoryType const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkMemoryType*>( this );
}
operator VkMemoryType *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkMemoryType*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::MemoryPropertyFlags const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( propertyFlags, heapIndex );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( MemoryType const & ) const = default;
#else
bool operator==( MemoryType const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( propertyFlags == rhs.propertyFlags )
&& ( heapIndex == rhs.heapIndex );
#endif
}
bool operator!=( MemoryType const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::MemoryPropertyFlags propertyFlags = {};
uint32_t heapIndex = {};
};
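// Hedged helper sketch (illustrative comment only): the canonical way MemoryType is
// consumed — combine the memoryTypeBits mask from a vk::MemoryRequirements query with
// the per-type propertyFlags from the physical device. Assumes the default `vk`
// namespace alias and valid `physicalDevice` / `reqs` values.
//   vk::PhysicalDeviceMemoryProperties memProps = physicalDevice.getMemoryProperties();
//   uint32_t typeIndex = ~0u;
//   for ( uint32_t i = 0; i < memProps.memoryTypeCount; ++i )
//   {
//     bool allowed     = ( reqs.memoryTypeBits & ( 1u << i ) ) != 0;
//     bool hostVisible = static_cast<bool>( memProps.memoryTypes[i].propertyFlags &
//                                           vk::MemoryPropertyFlagBits::eHostVisible );
//     if ( allowed && hostVisible ) { typeIndex = i; break; }
//   }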
// wrapper struct for struct VkMemoryUnmapInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryUnmapInfo.html
struct MemoryUnmapInfo
{
using NativeType = VkMemoryUnmapInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryUnmapInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR MemoryUnmapInfo(VULKAN_HPP_NAMESPACE::MemoryUnmapFlags flags_ = {}, VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, memory{ memory_ }
{}
VULKAN_HPP_CONSTEXPR MemoryUnmapInfo( MemoryUnmapInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MemoryUnmapInfo( VkMemoryUnmapInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: MemoryUnmapInfo( *reinterpret_cast<MemoryUnmapInfo const *>( &rhs ) )
{}
MemoryUnmapInfo & operator=( MemoryUnmapInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
MemoryUnmapInfo & operator=( VkMemoryUnmapInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryUnmapInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 MemoryUnmapInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MemoryUnmapInfo & setFlags( VULKAN_HPP_NAMESPACE::MemoryUnmapFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MemoryUnmapInfo & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
{
memory = memory_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkMemoryUnmapInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMemoryUnmapInfo*>( this );
}
operator VkMemoryUnmapInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMemoryUnmapInfo*>( this );
}
operator VkMemoryUnmapInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkMemoryUnmapInfo*>( this );
}
operator VkMemoryUnmapInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkMemoryUnmapInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::MemoryUnmapFlags const &, VULKAN_HPP_NAMESPACE::DeviceMemory const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, memory );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( MemoryUnmapInfo const & ) const = default;
#else
bool operator==( MemoryUnmapInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( memory == rhs.memory );
#endif
}
bool operator!=( MemoryUnmapInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryUnmapInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::MemoryUnmapFlags flags = {};
VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
};
template <>
struct CppType<StructureType, StructureType::eMemoryUnmapInfo>
{
using Type = MemoryUnmapInfo;
};
using MemoryUnmapInfoKHR = MemoryUnmapInfo;
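// Hedged usage sketch (illustrative comment only): the structure-based unmap path
// (VK_KHR_map_memory2, promoted in Vulkan 1.4, which this header exposes). Assumes
// the default `vk` namespace alias and a vk::DeviceMemory `memory` that was mapped
// with device.mapMemory2().
//   device.unmapMemory2( vk::MemoryUnmapInfo().setMemory( memory ) );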
#if defined( VK_USE_PLATFORM_WIN32_KHR )
// wrapper struct for struct VkMemoryWin32HandlePropertiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryWin32HandlePropertiesKHR.html
struct MemoryWin32HandlePropertiesKHR
{
using NativeType = VkMemoryWin32HandlePropertiesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryWin32HandlePropertiesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR MemoryWin32HandlePropertiesKHR(uint32_t memoryTypeBits_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, memoryTypeBits{ memoryTypeBits_ }
{}
VULKAN_HPP_CONSTEXPR MemoryWin32HandlePropertiesKHR( MemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MemoryWin32HandlePropertiesKHR( VkMemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: MemoryWin32HandlePropertiesKHR( *reinterpret_cast<MemoryWin32HandlePropertiesKHR const *>( &rhs ) )
{}
MemoryWin32HandlePropertiesKHR & operator=( MemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
MemoryWin32HandlePropertiesKHR & operator=( VkMemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR const *>( &rhs );
return *this;
}
operator VkMemoryWin32HandlePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMemoryWin32HandlePropertiesKHR*>( this );
}
operator VkMemoryWin32HandlePropertiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMemoryWin32HandlePropertiesKHR*>( this );
}
operator VkMemoryWin32HandlePropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkMemoryWin32HandlePropertiesKHR*>( this );
}
operator VkMemoryWin32HandlePropertiesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkMemoryWin32HandlePropertiesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, memoryTypeBits );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( MemoryWin32HandlePropertiesKHR const & ) const = default;
#else
bool operator==( MemoryWin32HandlePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( memoryTypeBits == rhs.memoryTypeBits );
#endif
}
bool operator!=( MemoryWin32HandlePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryWin32HandlePropertiesKHR;
void * pNext = {};
uint32_t memoryTypeBits = {};
};
template <>
struct CppType<StructureType, StructureType::eMemoryWin32HandlePropertiesKHR>
{
using Type = MemoryWin32HandlePropertiesKHR;
};
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
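// Hedged usage sketch (illustrative comment only, Win32 builds only): querying which
// memory types can import an externally created handle. Assumes the default `vk`
// namespace alias, a vk::Device `device` with VK_KHR_external_memory_win32 enabled,
// and a HANDLE `handle` obtained from another API. Note the handle type must be a
// non-opaque one for this query.
//   vk::MemoryWin32HandlePropertiesKHR props = device.getMemoryWin32HandlePropertiesKHR(
//     vk::ExternalMemoryHandleTypeFlagBits::eD3D11Texture, handle );
//   // props.memoryTypeBits then feeds the usual memory-type selection loop.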
#if defined( VK_USE_PLATFORM_FUCHSIA )
// wrapper struct for struct VkMemoryZirconHandlePropertiesFUCHSIA, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryZirconHandlePropertiesFUCHSIA.html
struct MemoryZirconHandlePropertiesFUCHSIA
{
using NativeType = VkMemoryZirconHandlePropertiesFUCHSIA;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryZirconHandlePropertiesFUCHSIA;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR MemoryZirconHandlePropertiesFUCHSIA(uint32_t memoryTypeBits_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, memoryTypeBits{ memoryTypeBits_ }
{}
VULKAN_HPP_CONSTEXPR MemoryZirconHandlePropertiesFUCHSIA( MemoryZirconHandlePropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MemoryZirconHandlePropertiesFUCHSIA( VkMemoryZirconHandlePropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
: MemoryZirconHandlePropertiesFUCHSIA( *reinterpret_cast<MemoryZirconHandlePropertiesFUCHSIA const *>( &rhs ) )
{}
MemoryZirconHandlePropertiesFUCHSIA & operator=( MemoryZirconHandlePropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
MemoryZirconHandlePropertiesFUCHSIA & operator=( VkMemoryZirconHandlePropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA const *>( &rhs );
return *this;
}
operator VkMemoryZirconHandlePropertiesFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMemoryZirconHandlePropertiesFUCHSIA*>( this );
}
operator VkMemoryZirconHandlePropertiesFUCHSIA &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMemoryZirconHandlePropertiesFUCHSIA*>( this );
}
operator VkMemoryZirconHandlePropertiesFUCHSIA const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkMemoryZirconHandlePropertiesFUCHSIA*>( this );
}
operator VkMemoryZirconHandlePropertiesFUCHSIA *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkMemoryZirconHandlePropertiesFUCHSIA*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, memoryTypeBits );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( MemoryZirconHandlePropertiesFUCHSIA const & ) const = default;
#else
bool operator==( MemoryZirconHandlePropertiesFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( memoryTypeBits == rhs.memoryTypeBits );
#endif
}
bool operator!=( MemoryZirconHandlePropertiesFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryZirconHandlePropertiesFUCHSIA;
void * pNext = {};
uint32_t memoryTypeBits = {};
};
template <>
struct CppType<StructureType, StructureType::eMemoryZirconHandlePropertiesFUCHSIA>
{
using Type = MemoryZirconHandlePropertiesFUCHSIA;
};
#endif /*VK_USE_PLATFORM_FUCHSIA*/
#if defined( VK_USE_PLATFORM_METAL_EXT )
// wrapper struct for struct VkMetalSurfaceCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMetalSurfaceCreateInfoEXT.html
struct MetalSurfaceCreateInfoEXT
{
using NativeType = VkMetalSurfaceCreateInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMetalSurfaceCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR MetalSurfaceCreateInfoEXT(VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags_ = {}, const CAMetalLayer * pLayer_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, pLayer{ pLayer_ }
{}
VULKAN_HPP_CONSTEXPR MetalSurfaceCreateInfoEXT( MetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MetalSurfaceCreateInfoEXT( VkMetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: MetalSurfaceCreateInfoEXT( *reinterpret_cast<MetalSurfaceCreateInfoEXT const *>( &rhs ) )
{}
MetalSurfaceCreateInfoEXT & operator=( MetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
MetalSurfaceCreateInfoEXT & operator=( VkMetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 MetalSurfaceCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MetalSurfaceCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MetalSurfaceCreateInfoEXT & setPLayer( const CAMetalLayer * pLayer_ ) VULKAN_HPP_NOEXCEPT
{
pLayer = pLayer_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkMetalSurfaceCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMetalSurfaceCreateInfoEXT*>( this );
}
operator VkMetalSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMetalSurfaceCreateInfoEXT*>( this );
}
operator VkMetalSurfaceCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkMetalSurfaceCreateInfoEXT*>( this );
}
operator VkMetalSurfaceCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkMetalSurfaceCreateInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT const &, const CAMetalLayer * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, pLayer );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( MetalSurfaceCreateInfoEXT const & ) const = default;
#else
bool operator==( MetalSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( pLayer == rhs.pLayer );
#endif
}
bool operator!=( MetalSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMetalSurfaceCreateInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags = {};
const CAMetalLayer * pLayer = {};
};
template <>
struct CppType<StructureType, StructureType::eMetalSurfaceCreateInfoEXT>
{
using Type = MetalSurfaceCreateInfoEXT;
};
#endif /*VK_USE_PLATFORM_METAL_EXT*/
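// Hedged usage sketch (illustrative comment only, Metal builds only): creating a
// vk::SurfaceKHR from a CAMetalLayer. Assumes the default `vk` namespace alias, a
// valid vk::Instance `instance` with VK_EXT_metal_surface enabled, and a `layer`
// pointer of type const CAMetalLayer *.
//   vk::MetalSurfaceCreateInfoEXT createInfo( {}, layer );
//   vk::SurfaceKHR surface = instance.createMetalSurfaceEXT( createInfo );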
// wrapper struct for struct VkMicromapBuildInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMicromapBuildInfoEXT.html
struct MicromapBuildInfoEXT
{
using NativeType = VkMicromapBuildInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMicromapBuildInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT(VULKAN_HPP_NAMESPACE::MicromapTypeEXT type_ = VULKAN_HPP_NAMESPACE::MicromapTypeEXT::eOpacityMicromap, VULKAN_HPP_NAMESPACE::BuildMicromapFlagsEXT flags_ = {}, VULKAN_HPP_NAMESPACE::BuildMicromapModeEXT mode_ = VULKAN_HPP_NAMESPACE::BuildMicromapModeEXT::eBuild, VULKAN_HPP_NAMESPACE::MicromapEXT dstMicromap_ = {}, uint32_t usageCountsCount_ = {}, const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * pUsageCounts_ = {}, const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * ppUsageCounts_ = {}, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data_ = {}, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData_ = {}, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR triangleArray_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize triangleArrayStride_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, type{ type_ }, flags{ flags_ }, mode{ mode_ }, dstMicromap{ dstMicromap_ }, usageCountsCount{ usageCountsCount_ }, pUsageCounts{ pUsageCounts_ }, ppUsageCounts{ ppUsageCounts_ }, data{ data_ }, scratchData{ scratchData_ }, triangleArray{ triangleArray_ }, triangleArrayStride{ triangleArrayStride_ }
{}
VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT( MicromapBuildInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MicromapBuildInfoEXT( VkMicromapBuildInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: MicromapBuildInfoEXT( *reinterpret_cast<MicromapBuildInfoEXT const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
MicromapBuildInfoEXT( VULKAN_HPP_NAMESPACE::MicromapTypeEXT type_, VULKAN_HPP_NAMESPACE::BuildMicromapFlagsEXT flags_, VULKAN_HPP_NAMESPACE::BuildMicromapModeEXT mode_, VULKAN_HPP_NAMESPACE::MicromapEXT dstMicromap_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MicromapUsageEXT> const & usageCounts_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const> const & pUsageCounts_ = {}, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data_ = {}, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData_ = {}, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR triangleArray_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize triangleArrayStride_ = {}, const void * pNext_ = nullptr )
: pNext( pNext_ ), type( type_ ), flags( flags_ ), mode( mode_ ), dstMicromap( dstMicromap_ ), usageCountsCount( static_cast<uint32_t>( !usageCounts_.empty() ? usageCounts_.size() : pUsageCounts_.size() ) ), pUsageCounts( usageCounts_.data() ), ppUsageCounts( pUsageCounts_.data() ), data( data_ ), scratchData( scratchData_ ), triangleArray( triangleArray_ ), triangleArrayStride( triangleArrayStride_ )
{
#ifdef VULKAN_HPP_NO_EXCEPTIONS
VULKAN_HPP_ASSERT( ( !usageCounts_.empty() + !pUsageCounts_.empty() ) <= 1 );
#else
if ( 1 < ( !usageCounts_.empty() + !pUsageCounts_.empty() ) )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::MicromapBuildInfoEXT::MicromapBuildInfoEXT: 1 < ( !usageCounts_.empty() + !pUsageCounts_.empty() )" );
}
#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
MicromapBuildInfoEXT & operator=( MicromapBuildInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
MicromapBuildInfoEXT & operator=( VkMicromapBuildInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setType( VULKAN_HPP_NAMESPACE::MicromapTypeEXT type_ ) VULKAN_HPP_NOEXCEPT
{
type = type_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::BuildMicromapFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setMode( VULKAN_HPP_NAMESPACE::BuildMicromapModeEXT mode_ ) VULKAN_HPP_NOEXCEPT
{
mode = mode_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setDstMicromap( VULKAN_HPP_NAMESPACE::MicromapEXT dstMicromap_ ) VULKAN_HPP_NOEXCEPT
{
dstMicromap = dstMicromap_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setUsageCountsCount( uint32_t usageCountsCount_ ) VULKAN_HPP_NOEXCEPT
{
usageCountsCount = usageCountsCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setPUsageCounts( const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * pUsageCounts_ ) VULKAN_HPP_NOEXCEPT
{
pUsageCounts = pUsageCounts_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
MicromapBuildInfoEXT & setUsageCounts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MicromapUsageEXT> const & usageCounts_ ) VULKAN_HPP_NOEXCEPT
{
usageCountsCount = static_cast<uint32_t>( usageCounts_.size() );
pUsageCounts = usageCounts_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setPpUsageCounts( const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * ppUsageCounts_ ) VULKAN_HPP_NOEXCEPT
{
ppUsageCounts = ppUsageCounts_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
MicromapBuildInfoEXT & setPUsageCounts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const> const & pUsageCounts_ ) VULKAN_HPP_NOEXCEPT
{
usageCountsCount = static_cast<uint32_t>( pUsageCounts_.size() );
ppUsageCounts = pUsageCounts_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & data_ ) VULKAN_HPP_NOEXCEPT
{
data = data_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setScratchData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & scratchData_ ) VULKAN_HPP_NOEXCEPT
{
scratchData = scratchData_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setTriangleArray( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & triangleArray_ ) VULKAN_HPP_NOEXCEPT
{
triangleArray = triangleArray_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setTriangleArrayStride( VULKAN_HPP_NAMESPACE::DeviceSize triangleArrayStride_ ) VULKAN_HPP_NOEXCEPT
{
triangleArrayStride = triangleArrayStride_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkMicromapBuildInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMicromapBuildInfoEXT*>( this );
}
operator VkMicromapBuildInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMicromapBuildInfoEXT*>( this );
}
operator VkMicromapBuildInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkMicromapBuildInfoEXT*>( this );
}
operator VkMicromapBuildInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkMicromapBuildInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::MicromapTypeEXT const &, VULKAN_HPP_NAMESPACE::BuildMicromapFlagsEXT const &, VULKAN_HPP_NAMESPACE::BuildMicromapModeEXT const &, VULKAN_HPP_NAMESPACE::MicromapEXT const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const &, const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * const &, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const &, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const &, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, type, flags, mode, dstMicromap, usageCountsCount, pUsageCounts, ppUsageCounts, data, scratchData, triangleArray, triangleArrayStride );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMicromapBuildInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::MicromapTypeEXT type = VULKAN_HPP_NAMESPACE::MicromapTypeEXT::eOpacityMicromap;
VULKAN_HPP_NAMESPACE::BuildMicromapFlagsEXT flags = {};
VULKAN_HPP_NAMESPACE::BuildMicromapModeEXT mode = VULKAN_HPP_NAMESPACE::BuildMicromapModeEXT::eBuild;
VULKAN_HPP_NAMESPACE::MicromapEXT dstMicromap = {};
uint32_t usageCountsCount = {};
const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * pUsageCounts = {};
const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * ppUsageCounts = {};
VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data = {};
VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData = {};
VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR triangleArray = {};
VULKAN_HPP_NAMESPACE::DeviceSize triangleArrayStride = {};
};
template <>
struct CppType<StructureType, StructureType::eMicromapBuildInfoEXT>
{
using Type = MicromapBuildInfoEXT;
};
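// Hedged usage sketch (illustrative comment only): sizing an opacity-micromap build
// before allocating its backing buffer. Assumes the default `vk` namespace alias, a
// vk::Device `device` with VK_EXT_opacity_micromap enabled, a populated
// std::vector<vk::MicromapUsageEXT> `usageCounts`, and the enhanced-mode ArrayProxy
// setters (the default configuration).
//   vk::MicromapBuildInfoEXT buildInfo;
//   buildInfo.setType( vk::MicromapTypeEXT::eOpacityMicromap )
//            .setMode( vk::BuildMicromapModeEXT::eBuild )
//            .setUsageCounts( usageCounts );  // fills usageCountsCount + pUsageCounts
//   vk::MicromapBuildSizesInfoEXT sizes =
//     device.getMicromapBuildSizesEXT( vk::AccelerationStructureBuildTypeKHR::eDevice, buildInfo );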
// wrapper struct for struct VkMicromapBuildSizesInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMicromapBuildSizesInfoEXT.html
struct MicromapBuildSizesInfoEXT
{
using NativeType = VkMicromapBuildSizesInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMicromapBuildSizesInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR MicromapBuildSizesInfoEXT(VULKAN_HPP_NAMESPACE::DeviceSize micromapSize_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize buildScratchSize_ = {}, VULKAN_HPP_NAMESPACE::Bool32 discardable_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, micromapSize{ micromapSize_ }, buildScratchSize{ buildScratchSize_ }, discardable{ discardable_ }
{}
VULKAN_HPP_CONSTEXPR MicromapBuildSizesInfoEXT( MicromapBuildSizesInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MicromapBuildSizesInfoEXT( VkMicromapBuildSizesInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: MicromapBuildSizesInfoEXT( *reinterpret_cast<MicromapBuildSizesInfoEXT const *>( &rhs ) )
{}
MicromapBuildSizesInfoEXT & operator=( MicromapBuildSizesInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
MicromapBuildSizesInfoEXT & operator=( VkMicromapBuildSizesInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 MicromapBuildSizesInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MicromapBuildSizesInfoEXT & setMicromapSize( VULKAN_HPP_NAMESPACE::DeviceSize micromapSize_ ) VULKAN_HPP_NOEXCEPT
{
micromapSize = micromapSize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MicromapBuildSizesInfoEXT & setBuildScratchSize( VULKAN_HPP_NAMESPACE::DeviceSize buildScratchSize_ ) VULKAN_HPP_NOEXCEPT
{
buildScratchSize = buildScratchSize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MicromapBuildSizesInfoEXT & setDiscardable( VULKAN_HPP_NAMESPACE::Bool32 discardable_ ) VULKAN_HPP_NOEXCEPT
{
discardable = discardable_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkMicromapBuildSizesInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMicromapBuildSizesInfoEXT*>( this );
}
operator VkMicromapBuildSizesInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMicromapBuildSizesInfoEXT*>( this );
}
operator VkMicromapBuildSizesInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkMicromapBuildSizesInfoEXT*>( this );
}
operator VkMicromapBuildSizesInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkMicromapBuildSizesInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, micromapSize, buildScratchSize, discardable );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( MicromapBuildSizesInfoEXT const & ) const = default;
#else
bool operator==( MicromapBuildSizesInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( micromapSize == rhs.micromapSize )
&& ( buildScratchSize == rhs.buildScratchSize )
&& ( discardable == rhs.discardable );
#endif
}
bool operator!=( MicromapBuildSizesInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMicromapBuildSizesInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DeviceSize micromapSize = {};
VULKAN_HPP_NAMESPACE::DeviceSize buildScratchSize = {};
VULKAN_HPP_NAMESPACE::Bool32 discardable = {};
};
template <>
struct CppType<StructureType, StructureType::eMicromapBuildSizesInfoEXT>
{
using Type = MicromapBuildSizesInfoEXT;
};
// wrapper struct for struct VkMicromapCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMicromapCreateInfoEXT.html
struct MicromapCreateInfoEXT
{
using NativeType = VkMicromapCreateInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMicromapCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR MicromapCreateInfoEXT(VULKAN_HPP_NAMESPACE::MicromapCreateFlagsEXT createFlags_ = {}, VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::MicromapTypeEXT type_ = VULKAN_HPP_NAMESPACE::MicromapTypeEXT::eOpacityMicromap, VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, createFlags{ createFlags_ }, buffer{ buffer_ }, offset{ offset_ }, size{ size_ }, type{ type_ }, deviceAddress{ deviceAddress_ }
{}
VULKAN_HPP_CONSTEXPR MicromapCreateInfoEXT( MicromapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MicromapCreateInfoEXT( VkMicromapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: MicromapCreateInfoEXT( *reinterpret_cast<MicromapCreateInfoEXT const *>( &rhs ) )
{}
MicromapCreateInfoEXT & operator=( MicromapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
MicromapCreateInfoEXT & operator=( VkMicromapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setCreateFlags( VULKAN_HPP_NAMESPACE::MicromapCreateFlagsEXT createFlags_ ) VULKAN_HPP_NOEXCEPT
{
createFlags = createFlags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
{
buffer = buffer_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
{
offset = offset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
{
size = size_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setType( VULKAN_HPP_NAMESPACE::MicromapTypeEXT type_ ) VULKAN_HPP_NOEXCEPT
{
type = type_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
{
deviceAddress = deviceAddress_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkMicromapCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMicromapCreateInfoEXT*>( this );
}
operator VkMicromapCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMicromapCreateInfoEXT*>( this );
}
operator VkMicromapCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkMicromapCreateInfoEXT*>( this );
}
operator VkMicromapCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkMicromapCreateInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::MicromapCreateFlagsEXT const &, VULKAN_HPP_NAMESPACE::Buffer const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::MicromapTypeEXT const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, createFlags, buffer, offset, size, type, deviceAddress );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( MicromapCreateInfoEXT const & ) const = default;
#else
bool operator==( MicromapCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( createFlags == rhs.createFlags )
&& ( buffer == rhs.buffer )
&& ( offset == rhs.offset )
&& ( size == rhs.size )
&& ( type == rhs.type )
&& ( deviceAddress == rhs.deviceAddress );
#endif
}
bool operator!=( MicromapCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMicromapCreateInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::MicromapCreateFlagsEXT createFlags = {};
VULKAN_HPP_NAMESPACE::Buffer buffer = {};
VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
VULKAN_HPP_NAMESPACE::DeviceSize size = {};
VULKAN_HPP_NAMESPACE::MicromapTypeEXT type = VULKAN_HPP_NAMESPACE::MicromapTypeEXT::eOpacityMicromap;
VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {};
};
template <>
struct CppType<StructureType, StructureType::eMicromapCreateInfoEXT>
{
using Type = MicromapCreateInfoEXT;
};
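// Hedged usage sketch (illustrative comment only): creating the micromap object on
// top of a backing buffer sized from a prior vk::MicromapBuildSizesInfoEXT query.
// Assumes the default `vk` namespace alias, valid `device` / `backingBuffer` handles,
// and a `sizes` value as produced in the sketch above.
//   vk::MicromapCreateInfoEXT createInfo;
//   createInfo.setBuffer( backingBuffer )
//             .setSize( sizes.micromapSize )
//             .setType( vk::MicromapTypeEXT::eOpacityMicromap );
//   vk::MicromapEXT micromap = device.createMicromapEXT( createInfo );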
// wrapper struct for struct VkMicromapTriangleEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMicromapTriangleEXT.html
struct MicromapTriangleEXT
{
using NativeType = VkMicromapTriangleEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR MicromapTriangleEXT(uint32_t dataOffset_ = {}, uint16_t subdivisionLevel_ = {}, uint16_t format_ = {}) VULKAN_HPP_NOEXCEPT
: dataOffset{ dataOffset_ }, subdivisionLevel{ subdivisionLevel_ }, format{ format_ }
{}
VULKAN_HPP_CONSTEXPR MicromapTriangleEXT( MicromapTriangleEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MicromapTriangleEXT( VkMicromapTriangleEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: MicromapTriangleEXT( *reinterpret_cast<MicromapTriangleEXT const *>( &rhs ) )
{}
MicromapTriangleEXT & operator=( MicromapTriangleEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
MicromapTriangleEXT & operator=( VkMicromapTriangleEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MicromapTriangleEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 MicromapTriangleEXT & setDataOffset( uint32_t dataOffset_ ) VULKAN_HPP_NOEXCEPT
{
dataOffset = dataOffset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MicromapTriangleEXT & setSubdivisionLevel( uint16_t subdivisionLevel_ ) VULKAN_HPP_NOEXCEPT
{
subdivisionLevel = subdivisionLevel_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MicromapTriangleEXT & setFormat( uint16_t format_ ) VULKAN_HPP_NOEXCEPT
{
format = format_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkMicromapTriangleEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMicromapTriangleEXT*>( this );
}
operator VkMicromapTriangleEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMicromapTriangleEXT*>( this );
}
operator VkMicromapTriangleEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkMicromapTriangleEXT*>( this );
}
operator VkMicromapTriangleEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkMicromapTriangleEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<uint32_t const &, uint16_t const &, uint16_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( dataOffset, subdivisionLevel, format );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( MicromapTriangleEXT const & ) const = default;
#else
bool operator==( MicromapTriangleEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( dataOffset == rhs.dataOffset )
&& ( subdivisionLevel == rhs.subdivisionLevel )
&& ( format == rhs.format );
#endif
}
bool operator!=( MicromapTriangleEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
uint32_t dataOffset = {};
uint16_t subdivisionLevel = {};
uint16_t format = {};
};
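// Hedged usage sketch (illustrative comment only): `format` is a bare uint16_t in the
// C API but is expected to carry a vk::OpacityMicromapFormatEXT value, so a cast is
// the usual way to fill it; the other values here are placeholders.
//   vk::MicromapTriangleEXT tri( /*dataOffset*/ 0, /*subdivisionLevel*/ 3,
//                                static_cast<uint16_t>( vk::OpacityMicromapFormatEXT::e4State ) );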
// wrapper struct for struct VkMicromapVersionInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMicromapVersionInfoEXT.html
struct MicromapVersionInfoEXT
{
using NativeType = VkMicromapVersionInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMicromapVersionInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR MicromapVersionInfoEXT(const uint8_t * pVersionData_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pVersionData{ pVersionData_ }
{}
VULKAN_HPP_CONSTEXPR MicromapVersionInfoEXT( MicromapVersionInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MicromapVersionInfoEXT( VkMicromapVersionInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: MicromapVersionInfoEXT( *reinterpret_cast<MicromapVersionInfoEXT const *>( &rhs ) )
{}
MicromapVersionInfoEXT & operator=( MicromapVersionInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
MicromapVersionInfoEXT & operator=( VkMicromapVersionInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 MicromapVersionInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MicromapVersionInfoEXT & setPVersionData( const uint8_t * pVersionData_ ) VULKAN_HPP_NOEXCEPT
{
pVersionData = pVersionData_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkMicromapVersionInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMicromapVersionInfoEXT*>( this );
}
operator VkMicromapVersionInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMicromapVersionInfoEXT*>( this );
}
operator VkMicromapVersionInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkMicromapVersionInfoEXT*>( this );
}
operator VkMicromapVersionInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkMicromapVersionInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const uint8_t * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pVersionData );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( MicromapVersionInfoEXT const & ) const = default;
#else
bool operator==( MicromapVersionInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pVersionData == rhs.pVersionData );
#endif
}
bool operator!=( MicromapVersionInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMicromapVersionInfoEXT;
const void * pNext = {};
const uint8_t * pVersionData = {};
};
template <>
struct CppType<StructureType, StructureType::eMicromapVersionInfoEXT>
{
using Type = MicromapVersionInfoEXT;
};
// wrapper struct for struct VkMultiDrawIndexedInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMultiDrawIndexedInfoEXT.html
struct MultiDrawIndexedInfoEXT
{
using NativeType = VkMultiDrawIndexedInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR MultiDrawIndexedInfoEXT(uint32_t firstIndex_ = {}, uint32_t indexCount_ = {}, int32_t vertexOffset_ = {}) VULKAN_HPP_NOEXCEPT
: firstIndex{ firstIndex_ }, indexCount{ indexCount_ }, vertexOffset{ vertexOffset_ }
{}
VULKAN_HPP_CONSTEXPR MultiDrawIndexedInfoEXT( MultiDrawIndexedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MultiDrawIndexedInfoEXT( VkMultiDrawIndexedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: MultiDrawIndexedInfoEXT( *reinterpret_cast<MultiDrawIndexedInfoEXT const *>( &rhs ) )
{}
MultiDrawIndexedInfoEXT & operator=( MultiDrawIndexedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
MultiDrawIndexedInfoEXT & operator=( VkMultiDrawIndexedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 MultiDrawIndexedInfoEXT & setFirstIndex( uint32_t firstIndex_ ) VULKAN_HPP_NOEXCEPT
{
firstIndex = firstIndex_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MultiDrawIndexedInfoEXT & setIndexCount( uint32_t indexCount_ ) VULKAN_HPP_NOEXCEPT
{
indexCount = indexCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MultiDrawIndexedInfoEXT & setVertexOffset( int32_t vertexOffset_ ) VULKAN_HPP_NOEXCEPT
{
vertexOffset = vertexOffset_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkMultiDrawIndexedInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMultiDrawIndexedInfoEXT*>( this );
}
operator VkMultiDrawIndexedInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMultiDrawIndexedInfoEXT*>( this );
}
operator VkMultiDrawIndexedInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkMultiDrawIndexedInfoEXT*>( this );
}
operator VkMultiDrawIndexedInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkMultiDrawIndexedInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<uint32_t const &, uint32_t const &, int32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( firstIndex, indexCount, vertexOffset );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( MultiDrawIndexedInfoEXT const & ) const = default;
#else
bool operator==( MultiDrawIndexedInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( firstIndex == rhs.firstIndex )
&& ( indexCount == rhs.indexCount )
&& ( vertexOffset == rhs.vertexOffset );
#endif
}
bool operator!=( MultiDrawIndexedInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
uint32_t firstIndex = {};
uint32_t indexCount = {};
int32_t vertexOffset = {};
};
// wrapper struct for struct VkMultiDrawInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMultiDrawInfoEXT.html
struct MultiDrawInfoEXT
{
using NativeType = VkMultiDrawInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR MultiDrawInfoEXT(uint32_t firstVertex_ = {}, uint32_t vertexCount_ = {}) VULKAN_HPP_NOEXCEPT
: firstVertex{ firstVertex_ }, vertexCount{ vertexCount_ }
{}
VULKAN_HPP_CONSTEXPR MultiDrawInfoEXT( MultiDrawInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MultiDrawInfoEXT( VkMultiDrawInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: MultiDrawInfoEXT( *reinterpret_cast<MultiDrawInfoEXT const *>( &rhs ) )
{}
MultiDrawInfoEXT & operator=( MultiDrawInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
MultiDrawInfoEXT & operator=( VkMultiDrawInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 MultiDrawInfoEXT & setFirstVertex( uint32_t firstVertex_ ) VULKAN_HPP_NOEXCEPT
{
firstVertex = firstVertex_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MultiDrawInfoEXT & setVertexCount( uint32_t vertexCount_ ) VULKAN_HPP_NOEXCEPT
{
vertexCount = vertexCount_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkMultiDrawInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMultiDrawInfoEXT*>( this );
}
operator VkMultiDrawInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMultiDrawInfoEXT*>( this );
}
operator VkMultiDrawInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkMultiDrawInfoEXT*>( this );
}
operator VkMultiDrawInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkMultiDrawInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( firstVertex, vertexCount );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( MultiDrawInfoEXT const & ) const = default;
#else
bool operator==( MultiDrawInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( firstVertex == rhs.firstVertex )
&& ( vertexCount == rhs.vertexCount );
#endif
}
bool operator!=( MultiDrawInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
uint32_t firstVertex = {};
uint32_t vertexCount = {};
};
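// Hedged usage sketch (illustrative comment only): issuing several non-contiguous
// draws in one call (VK_EXT_multi_draw). Assumes the default `vk` namespace alias and
// a vk::CommandBuffer `cmd` recording inside a render pass with a graphics pipeline
// bound (and <array> included in the calling code).
//   std::array<vk::MultiDrawInfoEXT, 2> draws = { vk::MultiDrawInfoEXT( 0, 3 ),
//                                                 vk::MultiDrawInfoEXT( 3, 3 ) };
//   cmd.drawMultiEXT( draws, /*instanceCount*/ 1, /*firstInstance*/ 0,
//                     /*stride*/ sizeof( vk::MultiDrawInfoEXT ) );
//   // The indexed variant takes vk::MultiDrawIndexedInfoEXT plus an optional shared
//   // vertex offset: cmd.drawMultiIndexedEXT( indexedDraws, 1, 0, stride, nullptr );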
// wrapper struct for struct VkMultisamplePropertiesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMultisamplePropertiesEXT.html
struct MultisamplePropertiesEXT
{
using NativeType = VkMultisamplePropertiesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMultisamplePropertiesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR MultisamplePropertiesEXT(VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, maxSampleLocationGridSize{ maxSampleLocationGridSize_ }
{}
VULKAN_HPP_CONSTEXPR MultisamplePropertiesEXT( MultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MultisamplePropertiesEXT( VkMultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: MultisamplePropertiesEXT( *reinterpret_cast<MultisamplePropertiesEXT const *>( &rhs ) )
{}
MultisamplePropertiesEXT & operator=( MultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
MultisamplePropertiesEXT & operator=( VkMultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT const *>( &rhs );
return *this;
}
operator VkMultisamplePropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMultisamplePropertiesEXT*>( this );
}
operator VkMultisamplePropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMultisamplePropertiesEXT*>( this );
}
operator VkMultisamplePropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkMultisamplePropertiesEXT*>( this );
}
operator VkMultisamplePropertiesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkMultisamplePropertiesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Extent2D const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, maxSampleLocationGridSize );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( MultisamplePropertiesEXT const & ) const = default;
#else
bool operator==( MultisamplePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize );
#endif
}
bool operator!=( MultisamplePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMultisamplePropertiesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize = {};
};
template <>
struct CppType<StructureType, StructureType::eMultisamplePropertiesEXT>
{
using Type = MultisamplePropertiesEXT;
};
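  // Usage sketch (editorial addition): MultisamplePropertiesEXT is a returned-only struct,
  // which is why no setters are generated for it. Assumes `physicalDevice` is a
  // VULKAN_HPP_NAMESPACE::PhysicalDevice and VK_EXT_sample_locations is supported.
  //
  //   VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT multisampleProperties =
  //     physicalDevice.getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e4 );
  //   VULKAN_HPP_NAMESPACE::Extent2D maxGrid = multisampleProperties.maxSampleLocationGridSize;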
// wrapper struct for struct VkMultisampledRenderToSingleSampledInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMultisampledRenderToSingleSampledInfoEXT.html
struct MultisampledRenderToSingleSampledInfoEXT
{
using NativeType = VkMultisampledRenderToSingleSampledInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMultisampledRenderToSingleSampledInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR MultisampledRenderToSingleSampledInfoEXT(VULKAN_HPP_NAMESPACE::Bool32 multisampledRenderToSingleSampledEnable_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, multisampledRenderToSingleSampledEnable{ multisampledRenderToSingleSampledEnable_ }, rasterizationSamples{ rasterizationSamples_ }
{}
VULKAN_HPP_CONSTEXPR MultisampledRenderToSingleSampledInfoEXT( MultisampledRenderToSingleSampledInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MultisampledRenderToSingleSampledInfoEXT( VkMultisampledRenderToSingleSampledInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: MultisampledRenderToSingleSampledInfoEXT( *reinterpret_cast<MultisampledRenderToSingleSampledInfoEXT const *>( &rhs ) )
{}
MultisampledRenderToSingleSampledInfoEXT & operator=( MultisampledRenderToSingleSampledInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
MultisampledRenderToSingleSampledInfoEXT & operator=( VkMultisampledRenderToSingleSampledInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MultisampledRenderToSingleSampledInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 MultisampledRenderToSingleSampledInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MultisampledRenderToSingleSampledInfoEXT & setMultisampledRenderToSingleSampledEnable( VULKAN_HPP_NAMESPACE::Bool32 multisampledRenderToSingleSampledEnable_ ) VULKAN_HPP_NOEXCEPT
{
multisampledRenderToSingleSampledEnable = multisampledRenderToSingleSampledEnable_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MultisampledRenderToSingleSampledInfoEXT & setRasterizationSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ ) VULKAN_HPP_NOEXCEPT
{
rasterizationSamples = rasterizationSamples_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkMultisampledRenderToSingleSampledInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMultisampledRenderToSingleSampledInfoEXT*>( this );
}
operator VkMultisampledRenderToSingleSampledInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMultisampledRenderToSingleSampledInfoEXT*>( this );
}
operator VkMultisampledRenderToSingleSampledInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkMultisampledRenderToSingleSampledInfoEXT*>( this );
}
operator VkMultisampledRenderToSingleSampledInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkMultisampledRenderToSingleSampledInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::SampleCountFlagBits const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, multisampledRenderToSingleSampledEnable, rasterizationSamples );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( MultisampledRenderToSingleSampledInfoEXT const & ) const = default;
#else
bool operator==( MultisampledRenderToSingleSampledInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( multisampledRenderToSingleSampledEnable == rhs.multisampledRenderToSingleSampledEnable )
&& ( rasterizationSamples == rhs.rasterizationSamples );
#endif
}
bool operator!=( MultisampledRenderToSingleSampledInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMultisampledRenderToSingleSampledInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 multisampledRenderToSingleSampledEnable = {};
VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
};
template <>
struct CppType<StructureType, StructureType::eMultisampledRenderToSingleSampledInfoEXT>
{
using Type = MultisampledRenderToSingleSampledInfoEXT;
};
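  // Usage sketch (editorial addition): enabling multisampled rendering to a single-sampled
  // attachment by chaining this struct into a VULKAN_HPP_NAMESPACE::RenderingInfo (per the
  // extension it can equally extend a SubpassDescription2). Assumes
  // VK_EXT_multisampled_render_to_single_sampled is enabled.
  //
  //   VULKAN_HPP_NAMESPACE::MultisampledRenderToSingleSampledInfoEXT msrtssInfo{};
  //   msrtssInfo.setMultisampledRenderToSingleSampledEnable( VK_TRUE )
  //             .setRasterizationSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e4 );
  //   VULKAN_HPP_NAMESPACE::RenderingInfo renderingInfo{};
  //   renderingInfo.setPNext( &msrtssInfo );
  //
  // The StructureChain helper can build the same pNext chain in a type-checked way.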
// wrapper struct for struct VkMultiviewPerViewAttributesInfoNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMultiviewPerViewAttributesInfoNVX.html
struct MultiviewPerViewAttributesInfoNVX
{
using NativeType = VkMultiviewPerViewAttributesInfoNVX;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMultiviewPerViewAttributesInfoNVX;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR MultiviewPerViewAttributesInfoNVX(VULKAN_HPP_NAMESPACE::Bool32 perViewAttributes_ = {}, VULKAN_HPP_NAMESPACE::Bool32 perViewAttributesPositionXOnly_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, perViewAttributes{ perViewAttributes_ }, perViewAttributesPositionXOnly{ perViewAttributesPositionXOnly_ }
{}
VULKAN_HPP_CONSTEXPR MultiviewPerViewAttributesInfoNVX( MultiviewPerViewAttributesInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MultiviewPerViewAttributesInfoNVX( VkMultiviewPerViewAttributesInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
: MultiviewPerViewAttributesInfoNVX( *reinterpret_cast<MultiviewPerViewAttributesInfoNVX const *>( &rhs ) )
{}
MultiviewPerViewAttributesInfoNVX & operator=( MultiviewPerViewAttributesInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
MultiviewPerViewAttributesInfoNVX & operator=( VkMultiviewPerViewAttributesInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MultiviewPerViewAttributesInfoNVX const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 MultiviewPerViewAttributesInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MultiviewPerViewAttributesInfoNVX & setPerViewAttributes( VULKAN_HPP_NAMESPACE::Bool32 perViewAttributes_ ) VULKAN_HPP_NOEXCEPT
{
perViewAttributes = perViewAttributes_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MultiviewPerViewAttributesInfoNVX & setPerViewAttributesPositionXOnly( VULKAN_HPP_NAMESPACE::Bool32 perViewAttributesPositionXOnly_ ) VULKAN_HPP_NOEXCEPT
{
perViewAttributesPositionXOnly = perViewAttributesPositionXOnly_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkMultiviewPerViewAttributesInfoNVX const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMultiviewPerViewAttributesInfoNVX*>( this );
}
operator VkMultiviewPerViewAttributesInfoNVX &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMultiviewPerViewAttributesInfoNVX*>( this );
}
operator VkMultiviewPerViewAttributesInfoNVX const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkMultiviewPerViewAttributesInfoNVX*>( this );
}
operator VkMultiviewPerViewAttributesInfoNVX *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkMultiviewPerViewAttributesInfoNVX*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, perViewAttributes, perViewAttributesPositionXOnly );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( MultiviewPerViewAttributesInfoNVX const & ) const = default;
#else
bool operator==( MultiviewPerViewAttributesInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( perViewAttributes == rhs.perViewAttributes )
&& ( perViewAttributesPositionXOnly == rhs.perViewAttributesPositionXOnly );
#endif
}
bool operator!=( MultiviewPerViewAttributesInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMultiviewPerViewAttributesInfoNVX;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 perViewAttributes = {};
VULKAN_HPP_NAMESPACE::Bool32 perViewAttributesPositionXOnly = {};
};
template <>
struct CppType<StructureType, StructureType::eMultiviewPerViewAttributesInfoNVX>
{
using Type = MultiviewPerViewAttributesInfoNVX;
};
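  // Usage sketch (editorial addition): requesting per-view vertex attributes for multiview
  // dynamic rendering by extending the rendering info. Assumes
  // VK_NVX_multiview_per_view_attributes is enabled and `renderingInfo` is a
  // VULKAN_HPP_NAMESPACE::RenderingInfo being filled in nearby.
  //
  //   VULKAN_HPP_NAMESPACE::MultiviewPerViewAttributesInfoNVX perViewAttributesInfo{};
  //   perViewAttributesInfo.setPerViewAttributes( VK_TRUE ).setPerViewAttributesPositionXOnly( VK_FALSE );
  //   renderingInfo.setPNext( &perViewAttributesInfo );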
// wrapper struct for struct VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM.html
struct MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM
{
using NativeType = VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM(uint32_t perViewRenderAreaCount_ = {}, const VULKAN_HPP_NAMESPACE::Rect2D * pPerViewRenderAreas_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, perViewRenderAreaCount{ perViewRenderAreaCount_ }, pPerViewRenderAreas{ pPerViewRenderAreas_ }
{}
VULKAN_HPP_CONSTEXPR MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM( MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM( VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
: MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM( *reinterpret_cast<MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & perViewRenderAreas_, const void * pNext_ = nullptr )
: pNext( pNext_ ), perViewRenderAreaCount( static_cast<uint32_t>( perViewRenderAreas_.size() ) ), pPerViewRenderAreas( perViewRenderAreas_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM & operator=( MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM & operator=( VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM & setPerViewRenderAreaCount( uint32_t perViewRenderAreaCount_ ) VULKAN_HPP_NOEXCEPT
{
perViewRenderAreaCount = perViewRenderAreaCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM & setPPerViewRenderAreas( const VULKAN_HPP_NAMESPACE::Rect2D * pPerViewRenderAreas_ ) VULKAN_HPP_NOEXCEPT
{
pPerViewRenderAreas = pPerViewRenderAreas_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM & setPerViewRenderAreas( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & perViewRenderAreas_ ) VULKAN_HPP_NOEXCEPT
{
perViewRenderAreaCount = static_cast<uint32_t>( perViewRenderAreas_.size() );
pPerViewRenderAreas = perViewRenderAreas_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM*>( this );
}
operator VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM*>( this );
}
operator VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM*>( this );
}
operator VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Rect2D * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, perViewRenderAreaCount, pPerViewRenderAreas );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM const & ) const = default;
#else
bool operator==( MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( perViewRenderAreaCount == rhs.perViewRenderAreaCount )
&& ( pPerViewRenderAreas == rhs.pPerViewRenderAreas );
#endif
}
bool operator!=( MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM;
const void * pNext = {};
uint32_t perViewRenderAreaCount = {};
const VULKAN_HPP_NAMESPACE::Rect2D * pPerViewRenderAreas = {};
};
template <>
struct CppType<StructureType, StructureType::eMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM>
{
using Type = MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM;
};
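  // Usage sketch (editorial addition): supplying one render area per view via the
  // ArrayProxyNoTemporaries constructor (available when VULKAN_HPP_DISABLE_ENHANCED_MODE is
  // not defined), then chaining into a VULKAN_HPP_NAMESPACE::RenderPassBeginInfo. Assumes
  // VK_QCOM_multiview_per_view_render_areas is enabled.
  //
  //   std::array<VULKAN_HPP_NAMESPACE::Rect2D, 2> renderAreas = {
  //     VULKAN_HPP_NAMESPACE::Rect2D( { 0, 0 }, { 512, 512 } ),
  //     VULKAN_HPP_NAMESPACE::Rect2D( { 512, 0 }, { 512, 512 } ) };
  //   VULKAN_HPP_NAMESPACE::MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM perViewAreas( renderAreas );
  //   renderPassBeginInfo.setPNext( &perViewAreas );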
// wrapper struct for struct VkMutableDescriptorTypeListEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMutableDescriptorTypeListEXT.html
struct MutableDescriptorTypeListEXT
{
using NativeType = VkMutableDescriptorTypeListEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR MutableDescriptorTypeListEXT(uint32_t descriptorTypeCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorType * pDescriptorTypes_ = {}) VULKAN_HPP_NOEXCEPT
: descriptorTypeCount{ descriptorTypeCount_ }, pDescriptorTypes{ pDescriptorTypes_ }
{}
VULKAN_HPP_CONSTEXPR MutableDescriptorTypeListEXT( MutableDescriptorTypeListEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MutableDescriptorTypeListEXT( VkMutableDescriptorTypeListEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: MutableDescriptorTypeListEXT( *reinterpret_cast<MutableDescriptorTypeListEXT const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
MutableDescriptorTypeListEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorType> const & descriptorTypes_ )
: descriptorTypeCount( static_cast<uint32_t>( descriptorTypes_.size() ) ), pDescriptorTypes( descriptorTypes_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
MutableDescriptorTypeListEXT & operator=( MutableDescriptorTypeListEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
MutableDescriptorTypeListEXT & operator=( VkMutableDescriptorTypeListEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeListEXT & setDescriptorTypeCount( uint32_t descriptorTypeCount_ ) VULKAN_HPP_NOEXCEPT
{
descriptorTypeCount = descriptorTypeCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeListEXT & setPDescriptorTypes( const VULKAN_HPP_NAMESPACE::DescriptorType * pDescriptorTypes_ ) VULKAN_HPP_NOEXCEPT
{
pDescriptorTypes = pDescriptorTypes_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
MutableDescriptorTypeListEXT & setDescriptorTypes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorType> const & descriptorTypes_ ) VULKAN_HPP_NOEXCEPT
{
descriptorTypeCount = static_cast<uint32_t>( descriptorTypes_.size() );
pDescriptorTypes = descriptorTypes_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkMutableDescriptorTypeListEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMutableDescriptorTypeListEXT*>( this );
}
operator VkMutableDescriptorTypeListEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMutableDescriptorTypeListEXT*>( this );
}
operator VkMutableDescriptorTypeListEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkMutableDescriptorTypeListEXT*>( this );
}
operator VkMutableDescriptorTypeListEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkMutableDescriptorTypeListEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<uint32_t const &, const VULKAN_HPP_NAMESPACE::DescriptorType * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( descriptorTypeCount, pDescriptorTypes );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( MutableDescriptorTypeListEXT const & ) const = default;
#else
bool operator==( MutableDescriptorTypeListEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( descriptorTypeCount == rhs.descriptorTypeCount )
&& ( pDescriptorTypes == rhs.pDescriptorTypes );
#endif
}
bool operator!=( MutableDescriptorTypeListEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
uint32_t descriptorTypeCount = {};
const VULKAN_HPP_NAMESPACE::DescriptorType * pDescriptorTypes = {};
};
using MutableDescriptorTypeListVALVE = MutableDescriptorTypeListEXT;
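  // Usage sketch (editorial addition): one list enumerating the concrete descriptor types a
  // mutable binding may assume; the array constructor fills descriptorTypeCount and
  // pDescriptorTypes together.
  //
  //   std::array<VULKAN_HPP_NAMESPACE::DescriptorType, 2> mutableTypes = {
  //     VULKAN_HPP_NAMESPACE::DescriptorType::eSampledImage,
  //     VULKAN_HPP_NAMESPACE::DescriptorType::eStorageImage };
  //   VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListEXT typeList( mutableTypes );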
// wrapper struct for struct VkMutableDescriptorTypeCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMutableDescriptorTypeCreateInfoEXT.html
struct MutableDescriptorTypeCreateInfoEXT
{
using NativeType = VkMutableDescriptorTypeCreateInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMutableDescriptorTypeCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR MutableDescriptorTypeCreateInfoEXT(uint32_t mutableDescriptorTypeListCount_ = {}, const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListEXT * pMutableDescriptorTypeLists_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, mutableDescriptorTypeListCount{ mutableDescriptorTypeListCount_ }, pMutableDescriptorTypeLists{ pMutableDescriptorTypeLists_ }
{}
VULKAN_HPP_CONSTEXPR MutableDescriptorTypeCreateInfoEXT( MutableDescriptorTypeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MutableDescriptorTypeCreateInfoEXT( VkMutableDescriptorTypeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: MutableDescriptorTypeCreateInfoEXT( *reinterpret_cast<MutableDescriptorTypeCreateInfoEXT const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
MutableDescriptorTypeCreateInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListEXT> const & mutableDescriptorTypeLists_, const void * pNext_ = nullptr )
: pNext( pNext_ ), mutableDescriptorTypeListCount( static_cast<uint32_t>( mutableDescriptorTypeLists_.size() ) ), pMutableDescriptorTypeLists( mutableDescriptorTypeLists_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
MutableDescriptorTypeCreateInfoEXT & operator=( MutableDescriptorTypeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
MutableDescriptorTypeCreateInfoEXT & operator=( VkMutableDescriptorTypeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MutableDescriptorTypeCreateInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeCreateInfoEXT & setMutableDescriptorTypeListCount( uint32_t mutableDescriptorTypeListCount_ ) VULKAN_HPP_NOEXCEPT
{
mutableDescriptorTypeListCount = mutableDescriptorTypeListCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeCreateInfoEXT & setPMutableDescriptorTypeLists( const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListEXT * pMutableDescriptorTypeLists_ ) VULKAN_HPP_NOEXCEPT
{
pMutableDescriptorTypeLists = pMutableDescriptorTypeLists_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
MutableDescriptorTypeCreateInfoEXT & setMutableDescriptorTypeLists( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListEXT> const & mutableDescriptorTypeLists_ ) VULKAN_HPP_NOEXCEPT
{
mutableDescriptorTypeListCount = static_cast<uint32_t>( mutableDescriptorTypeLists_.size() );
pMutableDescriptorTypeLists = mutableDescriptorTypeLists_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkMutableDescriptorTypeCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMutableDescriptorTypeCreateInfoEXT*>( this );
}
operator VkMutableDescriptorTypeCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMutableDescriptorTypeCreateInfoEXT*>( this );
}
operator VkMutableDescriptorTypeCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkMutableDescriptorTypeCreateInfoEXT*>( this );
}
operator VkMutableDescriptorTypeCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkMutableDescriptorTypeCreateInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListEXT * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, mutableDescriptorTypeListCount, pMutableDescriptorTypeLists );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( MutableDescriptorTypeCreateInfoEXT const & ) const = default;
#else
bool operator==( MutableDescriptorTypeCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( mutableDescriptorTypeListCount == rhs.mutableDescriptorTypeListCount )
&& ( pMutableDescriptorTypeLists == rhs.pMutableDescriptorTypeLists );
#endif
}
bool operator!=( MutableDescriptorTypeCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMutableDescriptorTypeCreateInfoEXT;
const void * pNext = {};
uint32_t mutableDescriptorTypeListCount = {};
const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListEXT * pMutableDescriptorTypeLists = {};
};
template <>
struct CppType<StructureType, StructureType::eMutableDescriptorTypeCreateInfoEXT>
{
using Type = MutableDescriptorTypeCreateInfoEXT;
};
using MutableDescriptorTypeCreateInfoVALVE = MutableDescriptorTypeCreateInfoEXT;
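  // Usage sketch (editorial addition): attaching one MutableDescriptorTypeListEXT per binding
  // of a descriptor set layout. Assumes `typeList` from the sketch above and a
  // `descriptorSetLayoutCreateInfo` with a single mutable binding.
  //
  //   std::array<VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListEXT, 1> typeLists = { typeList };
  //   VULKAN_HPP_NAMESPACE::MutableDescriptorTypeCreateInfoEXT mutableTypeInfo( typeLists );
  //   descriptorSetLayoutCreateInfo.setPNext( &mutableTypeInfo );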
// wrapper struct for struct VkOpaqueCaptureDescriptorDataCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkOpaqueCaptureDescriptorDataCreateInfoEXT.html
struct OpaqueCaptureDescriptorDataCreateInfoEXT
{
using NativeType = VkOpaqueCaptureDescriptorDataCreateInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eOpaqueCaptureDescriptorDataCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR OpaqueCaptureDescriptorDataCreateInfoEXT(const void * opaqueCaptureDescriptorData_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, opaqueCaptureDescriptorData{ opaqueCaptureDescriptorData_ }
{}
VULKAN_HPP_CONSTEXPR OpaqueCaptureDescriptorDataCreateInfoEXT( OpaqueCaptureDescriptorDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
OpaqueCaptureDescriptorDataCreateInfoEXT( VkOpaqueCaptureDescriptorDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: OpaqueCaptureDescriptorDataCreateInfoEXT( *reinterpret_cast<OpaqueCaptureDescriptorDataCreateInfoEXT const *>( &rhs ) )
{}
OpaqueCaptureDescriptorDataCreateInfoEXT & operator=( OpaqueCaptureDescriptorDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
OpaqueCaptureDescriptorDataCreateInfoEXT & operator=( VkOpaqueCaptureDescriptorDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::OpaqueCaptureDescriptorDataCreateInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 OpaqueCaptureDescriptorDataCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 OpaqueCaptureDescriptorDataCreateInfoEXT & setOpaqueCaptureDescriptorData( const void * opaqueCaptureDescriptorData_ ) VULKAN_HPP_NOEXCEPT
{
opaqueCaptureDescriptorData = opaqueCaptureDescriptorData_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkOpaqueCaptureDescriptorDataCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkOpaqueCaptureDescriptorDataCreateInfoEXT*>( this );
}
operator VkOpaqueCaptureDescriptorDataCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkOpaqueCaptureDescriptorDataCreateInfoEXT*>( this );
}
operator VkOpaqueCaptureDescriptorDataCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkOpaqueCaptureDescriptorDataCreateInfoEXT*>( this );
}
operator VkOpaqueCaptureDescriptorDataCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkOpaqueCaptureDescriptorDataCreateInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const void * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, opaqueCaptureDescriptorData );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( OpaqueCaptureDescriptorDataCreateInfoEXT const & ) const = default;
#else
bool operator==( OpaqueCaptureDescriptorDataCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( opaqueCaptureDescriptorData == rhs.opaqueCaptureDescriptorData );
#endif
}
bool operator!=( OpaqueCaptureDescriptorDataCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eOpaqueCaptureDescriptorDataCreateInfoEXT;
const void * pNext = {};
const void * opaqueCaptureDescriptorData = {};
};
template <>
struct CppType<StructureType, StructureType::eOpaqueCaptureDescriptorDataCreateInfoEXT>
{
using Type = OpaqueCaptureDescriptorDataCreateInfoEXT;
};
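  // Usage sketch (editorial addition): replaying a capture with VK_EXT_descriptor_buffer.
  // `capturedData` is a hypothetical stand-in for the opaque blob previously obtained with
  // Device::getBufferOpaqueCaptureDescriptorDataEXT during the capture run.
  //
  //   VULKAN_HPP_NAMESPACE::OpaqueCaptureDescriptorDataCreateInfoEXT opaqueCaptureInfo{};
  //   opaqueCaptureInfo.setOpaqueCaptureDescriptorData( capturedData );
  //   bufferCreateInfo.setPNext( &opaqueCaptureInfo );  // passed when re-creating the resource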
// wrapper struct for struct VkOpticalFlowExecuteInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkOpticalFlowExecuteInfoNV.html
struct OpticalFlowExecuteInfoNV
{
using NativeType = VkOpticalFlowExecuteInfoNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eOpticalFlowExecuteInfoNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR OpticalFlowExecuteInfoNV(VULKAN_HPP_NAMESPACE::OpticalFlowExecuteFlagsNV flags_ = {}, uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::Rect2D * pRegions_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, regionCount{ regionCount_ }, pRegions{ pRegions_ }
{}
VULKAN_HPP_CONSTEXPR OpticalFlowExecuteInfoNV( OpticalFlowExecuteInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
OpticalFlowExecuteInfoNV( VkOpticalFlowExecuteInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: OpticalFlowExecuteInfoNV( *reinterpret_cast<OpticalFlowExecuteInfoNV const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
OpticalFlowExecuteInfoNV( VULKAN_HPP_NAMESPACE::OpticalFlowExecuteFlagsNV flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & regions_, void * pNext_ = nullptr )
: pNext( pNext_ ), flags( flags_ ), regionCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
OpticalFlowExecuteInfoNV & operator=( OpticalFlowExecuteInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
OpticalFlowExecuteInfoNV & operator=( VkOpticalFlowExecuteInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 OpticalFlowExecuteInfoNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 OpticalFlowExecuteInfoNV & setFlags( VULKAN_HPP_NAMESPACE::OpticalFlowExecuteFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 OpticalFlowExecuteInfoNV & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
{
regionCount = regionCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 OpticalFlowExecuteInfoNV & setPRegions( const VULKAN_HPP_NAMESPACE::Rect2D * pRegions_ ) VULKAN_HPP_NOEXCEPT
{
pRegions = pRegions_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
OpticalFlowExecuteInfoNV & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & regions_ ) VULKAN_HPP_NOEXCEPT
{
regionCount = static_cast<uint32_t>( regions_.size() );
pRegions = regions_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkOpticalFlowExecuteInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkOpticalFlowExecuteInfoNV*>( this );
}
operator VkOpticalFlowExecuteInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkOpticalFlowExecuteInfoNV*>( this );
}
operator VkOpticalFlowExecuteInfoNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkOpticalFlowExecuteInfoNV*>( this );
}
operator VkOpticalFlowExecuteInfoNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkOpticalFlowExecuteInfoNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::OpticalFlowExecuteFlagsNV const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Rect2D * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, regionCount, pRegions );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( OpticalFlowExecuteInfoNV const & ) const = default;
#else
bool operator==( OpticalFlowExecuteInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( regionCount == rhs.regionCount )
&& ( pRegions == rhs.pRegions );
#endif
}
bool operator!=( OpticalFlowExecuteInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eOpticalFlowExecuteInfoNV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::OpticalFlowExecuteFlagsNV flags = {};
uint32_t regionCount = {};
const VULKAN_HPP_NAMESPACE::Rect2D * pRegions = {};
};
template <>
struct CppType<StructureType, StructureType::eOpticalFlowExecuteInfoNV>
{
using Type = OpticalFlowExecuteInfoNV;
};
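  // Usage sketch (editorial addition): restricting optical-flow execution to one region of
  // interest. Assumes a VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV `session` with bound input
  // images and a recording `commandBuffer` (VK_NV_optical_flow).
  //
  //   VULKAN_HPP_NAMESPACE::Rect2D region( { 0, 0 }, { 256, 256 } );
  //   VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV executeInfo( {}, 1, &region );
  //   commandBuffer.opticalFlowExecuteNV( session, executeInfo );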
// wrapper struct for struct VkOpticalFlowImageFormatInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkOpticalFlowImageFormatInfoNV.html
struct OpticalFlowImageFormatInfoNV
{
using NativeType = VkOpticalFlowImageFormatInfoNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eOpticalFlowImageFormatInfoNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR OpticalFlowImageFormatInfoNV(VULKAN_HPP_NAMESPACE::OpticalFlowUsageFlagsNV usage_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, usage{ usage_ }
{}
VULKAN_HPP_CONSTEXPR OpticalFlowImageFormatInfoNV( OpticalFlowImageFormatInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
OpticalFlowImageFormatInfoNV( VkOpticalFlowImageFormatInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: OpticalFlowImageFormatInfoNV( *reinterpret_cast<OpticalFlowImageFormatInfoNV const *>( &rhs ) )
{}
OpticalFlowImageFormatInfoNV & operator=( OpticalFlowImageFormatInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
OpticalFlowImageFormatInfoNV & operator=( VkOpticalFlowImageFormatInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 OpticalFlowImageFormatInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 OpticalFlowImageFormatInfoNV & setUsage( VULKAN_HPP_NAMESPACE::OpticalFlowUsageFlagsNV usage_ ) VULKAN_HPP_NOEXCEPT
{
usage = usage_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkOpticalFlowImageFormatInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkOpticalFlowImageFormatInfoNV*>( this );
}
operator VkOpticalFlowImageFormatInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkOpticalFlowImageFormatInfoNV*>( this );
}
operator VkOpticalFlowImageFormatInfoNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkOpticalFlowImageFormatInfoNV*>( this );
}
operator VkOpticalFlowImageFormatInfoNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkOpticalFlowImageFormatInfoNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::OpticalFlowUsageFlagsNV const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, usage );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( OpticalFlowImageFormatInfoNV const & ) const = default;
#else
bool operator==( OpticalFlowImageFormatInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( usage == rhs.usage );
#endif
}
bool operator!=( OpticalFlowImageFormatInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eOpticalFlowImageFormatInfoNV;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::OpticalFlowUsageFlagsNV usage = {};
};
template <>
struct CppType<StructureType, StructureType::eOpticalFlowImageFormatInfoNV>
{
using Type = OpticalFlowImageFormatInfoNV;
};
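  // Usage sketch (editorial addition): describing how an image will be used by an optical-flow
  // session; per the extension the same struct can also extend image creation and format
  // queries for such images.
  //
  //   VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV opticalFlowFormatInfo{};
  //   opticalFlowFormatInfo.setUsage( VULKAN_HPP_NAMESPACE::OpticalFlowUsageFlagBitsNV::eInput );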
// wrapper struct for struct VkOpticalFlowImageFormatPropertiesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkOpticalFlowImageFormatPropertiesNV.html
struct OpticalFlowImageFormatPropertiesNV
{
using NativeType = VkOpticalFlowImageFormatPropertiesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eOpticalFlowImageFormatPropertiesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR OpticalFlowImageFormatPropertiesNV(VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, format{ format_ }
{}
VULKAN_HPP_CONSTEXPR OpticalFlowImageFormatPropertiesNV( OpticalFlowImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
OpticalFlowImageFormatPropertiesNV( VkOpticalFlowImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: OpticalFlowImageFormatPropertiesNV( *reinterpret_cast<OpticalFlowImageFormatPropertiesNV const *>( &rhs ) )
{}
OpticalFlowImageFormatPropertiesNV & operator=( OpticalFlowImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
OpticalFlowImageFormatPropertiesNV & operator=( VkOpticalFlowImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV const *>( &rhs );
return *this;
}
operator VkOpticalFlowImageFormatPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkOpticalFlowImageFormatPropertiesNV*>( this );
}
operator VkOpticalFlowImageFormatPropertiesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkOpticalFlowImageFormatPropertiesNV*>( this );
}
operator VkOpticalFlowImageFormatPropertiesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkOpticalFlowImageFormatPropertiesNV*>( this );
}
operator VkOpticalFlowImageFormatPropertiesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkOpticalFlowImageFormatPropertiesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Format const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, format );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( OpticalFlowImageFormatPropertiesNV const & ) const = default;
#else
bool operator==( OpticalFlowImageFormatPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( format == rhs.format );
#endif
}
bool operator!=( OpticalFlowImageFormatPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eOpticalFlowImageFormatPropertiesNV;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
};
template <>
struct CppType<StructureType, StructureType::eOpticalFlowImageFormatPropertiesNV>
{
using Type = OpticalFlowImageFormatPropertiesNV;
};
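  // Usage sketch (editorial addition): enumerating the formats compatible with the usage
  // described above. Assumes the default configuration (exceptions enabled), where the
  // enhanced wrapper returns the vector directly.
  //
  //   std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV> flowFormats =
  //     physicalDevice.getOpticalFlowImageFormatsNV( opticalFlowFormatInfo );
  //   VULKAN_HPP_NAMESPACE::Format inputFormat = flowFormats.front().format;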
// wrapper struct for struct VkOpticalFlowSessionCreateInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkOpticalFlowSessionCreateInfoNV.html
struct OpticalFlowSessionCreateInfoNV
{
using NativeType = VkOpticalFlowSessionCreateInfoNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eOpticalFlowSessionCreateInfoNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR OpticalFlowSessionCreateInfoNV(uint32_t width_ = {}, uint32_t height_ = {}, VULKAN_HPP_NAMESPACE::Format imageFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::Format flowVectorFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::Format costFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV outputGridSize_ = {}, VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV hintGridSize_ = {}, VULKAN_HPP_NAMESPACE::OpticalFlowPerformanceLevelNV performanceLevel_ = VULKAN_HPP_NAMESPACE::OpticalFlowPerformanceLevelNV::eUnknown, VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateFlagsNV flags_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, width{ width_ }, height{ height_ }, imageFormat{ imageFormat_ }, flowVectorFormat{ flowVectorFormat_ }, costFormat{ costFormat_ }, outputGridSize{ outputGridSize_ }, hintGridSize{ hintGridSize_ }, performanceLevel{ performanceLevel_ }, flags{ flags_ }
{}
VULKAN_HPP_CONSTEXPR OpticalFlowSessionCreateInfoNV( OpticalFlowSessionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
OpticalFlowSessionCreateInfoNV( VkOpticalFlowSessionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: OpticalFlowSessionCreateInfoNV( *reinterpret_cast<OpticalFlowSessionCreateInfoNV const *>( &rhs ) )
{}
OpticalFlowSessionCreateInfoNV & operator=( OpticalFlowSessionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
OpticalFlowSessionCreateInfoNV & operator=( VkOpticalFlowSessionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
{
width = width_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
{
height = height_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setImageFormat( VULKAN_HPP_NAMESPACE::Format imageFormat_ ) VULKAN_HPP_NOEXCEPT
{
imageFormat = imageFormat_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setFlowVectorFormat( VULKAN_HPP_NAMESPACE::Format flowVectorFormat_ ) VULKAN_HPP_NOEXCEPT
{
flowVectorFormat = flowVectorFormat_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setCostFormat( VULKAN_HPP_NAMESPACE::Format costFormat_ ) VULKAN_HPP_NOEXCEPT
{
costFormat = costFormat_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setOutputGridSize( VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV outputGridSize_ ) VULKAN_HPP_NOEXCEPT
{
outputGridSize = outputGridSize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setHintGridSize( VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV hintGridSize_ ) VULKAN_HPP_NOEXCEPT
{
hintGridSize = hintGridSize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setPerformanceLevel( VULKAN_HPP_NAMESPACE::OpticalFlowPerformanceLevelNV performanceLevel_ ) VULKAN_HPP_NOEXCEPT
{
performanceLevel = performanceLevel_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkOpticalFlowSessionCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkOpticalFlowSessionCreateInfoNV*>( this );
}
operator VkOpticalFlowSessionCreateInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkOpticalFlowSessionCreateInfoNV*>( this );
}
operator VkOpticalFlowSessionCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkOpticalFlowSessionCreateInfoNV*>( this );
}
operator VkOpticalFlowSessionCreateInfoNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkOpticalFlowSessionCreateInfoNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV const &, VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV const &, VULKAN_HPP_NAMESPACE::OpticalFlowPerformanceLevelNV const &, VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateFlagsNV const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, width, height, imageFormat, flowVectorFormat, costFormat, outputGridSize, hintGridSize, performanceLevel, flags );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( OpticalFlowSessionCreateInfoNV const & ) const = default;
#else
bool operator==( OpticalFlowSessionCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( width == rhs.width )
&& ( height == rhs.height )
&& ( imageFormat == rhs.imageFormat )
&& ( flowVectorFormat == rhs.flowVectorFormat )
&& ( costFormat == rhs.costFormat )
&& ( outputGridSize == rhs.outputGridSize )
&& ( hintGridSize == rhs.hintGridSize )
&& ( performanceLevel == rhs.performanceLevel )
&& ( flags == rhs.flags );
#endif
}
bool operator!=( OpticalFlowSessionCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eOpticalFlowSessionCreateInfoNV;
void * pNext = {};
uint32_t width = {};
uint32_t height = {};
VULKAN_HPP_NAMESPACE::Format imageFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
VULKAN_HPP_NAMESPACE::Format flowVectorFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
VULKAN_HPP_NAMESPACE::Format costFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV outputGridSize = {};
VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV hintGridSize = {};
VULKAN_HPP_NAMESPACE::OpticalFlowPerformanceLevelNV performanceLevel = VULKAN_HPP_NAMESPACE::OpticalFlowPerformanceLevelNV::eUnknown;
VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateFlagsNV flags = {};
};
template <>
struct CppType<StructureType, StructureType::eOpticalFlowSessionCreateInfoNV>
{
using Type = OpticalFlowSessionCreateInfoNV;
};
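  // Usage sketch (editorial addition): creating a session for 1920x1080 input. `inputFormat`
  // and `flowFormat` are assumed to come from getOpticalFlowImageFormatsNV queries for input
  // and flow-vector usage respectively.
  //
  //   VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV sessionCreateInfo{};
  //   sessionCreateInfo.setWidth( 1920 )
  //                    .setHeight( 1080 )
  //                    .setImageFormat( inputFormat )
  //                    .setFlowVectorFormat( flowFormat )
  //                    .setOutputGridSize( VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagBitsNV::e4X4 );
  //   VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session = device.createOpticalFlowSessionNV( sessionCreateInfo );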
// wrapper struct for struct VkOpticalFlowSessionCreatePrivateDataInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkOpticalFlowSessionCreatePrivateDataInfoNV.html
struct OpticalFlowSessionCreatePrivateDataInfoNV
{
using NativeType = VkOpticalFlowSessionCreatePrivateDataInfoNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eOpticalFlowSessionCreatePrivateDataInfoNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR OpticalFlowSessionCreatePrivateDataInfoNV(uint32_t id_ = {}, uint32_t size_ = {}, const void * pPrivateData_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, id{ id_ }, size{ size_ }, pPrivateData{ pPrivateData_ }
{}
VULKAN_HPP_CONSTEXPR OpticalFlowSessionCreatePrivateDataInfoNV( OpticalFlowSessionCreatePrivateDataInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
OpticalFlowSessionCreatePrivateDataInfoNV( VkOpticalFlowSessionCreatePrivateDataInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: OpticalFlowSessionCreatePrivateDataInfoNV( *reinterpret_cast<OpticalFlowSessionCreatePrivateDataInfoNV const *>( &rhs ) )
{}
OpticalFlowSessionCreatePrivateDataInfoNV & operator=( OpticalFlowSessionCreatePrivateDataInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
OpticalFlowSessionCreatePrivateDataInfoNV & operator=( VkOpticalFlowSessionCreatePrivateDataInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreatePrivateDataInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreatePrivateDataInfoNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreatePrivateDataInfoNV & setId( uint32_t id_ ) VULKAN_HPP_NOEXCEPT
{
id = id_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreatePrivateDataInfoNV & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT
{
size = size_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreatePrivateDataInfoNV & setPPrivateData( const void * pPrivateData_ ) VULKAN_HPP_NOEXCEPT
{
pPrivateData = pPrivateData_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkOpticalFlowSessionCreatePrivateDataInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkOpticalFlowSessionCreatePrivateDataInfoNV*>( this );
}
operator VkOpticalFlowSessionCreatePrivateDataInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkOpticalFlowSessionCreatePrivateDataInfoNV*>( this );
}
operator VkOpticalFlowSessionCreatePrivateDataInfoNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkOpticalFlowSessionCreatePrivateDataInfoNV*>( this );
}
operator VkOpticalFlowSessionCreatePrivateDataInfoNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkOpticalFlowSessionCreatePrivateDataInfoNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, const void * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, id, size, pPrivateData );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( OpticalFlowSessionCreatePrivateDataInfoNV const & ) const = default;
#else
bool operator==( OpticalFlowSessionCreatePrivateDataInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( id == rhs.id )
&& ( size == rhs.size )
&& ( pPrivateData == rhs.pPrivateData );
#endif
}
bool operator!=( OpticalFlowSessionCreatePrivateDataInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eOpticalFlowSessionCreatePrivateDataInfoNV;
void * pNext = {};
uint32_t id = {};
uint32_t size = {};
const void * pPrivateData = {};
};
template <>
struct CppType<StructureType, StructureType::eOpticalFlowSessionCreatePrivateDataInfoNV>
{
using Type = OpticalFlowSessionCreatePrivateDataInfoNV;
};
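  // Usage sketch (editorial addition): passing vendor-specific private data at session
  // creation. `privateBlob` and `privateBlobSize` are hypothetical stand-ins for
  // implementation-provided data.
  //
  //   VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreatePrivateDataInfoNV privateDataInfo{};
  //   privateDataInfo.setId( 0 ).setSize( privateBlobSize ).setPPrivateData( privateBlob );
  //   sessionCreateInfo.setPNext( &privateDataInfo );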
// wrapper struct for struct VkOutOfBandQueueTypeInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkOutOfBandQueueTypeInfoNV.html
struct OutOfBandQueueTypeInfoNV
{
using NativeType = VkOutOfBandQueueTypeInfoNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eOutOfBandQueueTypeInfoNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR OutOfBandQueueTypeInfoNV(VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeNV queueType_ = VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeNV::eRender, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, queueType{ queueType_ }
{}
VULKAN_HPP_CONSTEXPR OutOfBandQueueTypeInfoNV( OutOfBandQueueTypeInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
OutOfBandQueueTypeInfoNV( VkOutOfBandQueueTypeInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: OutOfBandQueueTypeInfoNV( *reinterpret_cast<OutOfBandQueueTypeInfoNV const *>( &rhs ) )
{}
OutOfBandQueueTypeInfoNV & operator=( OutOfBandQueueTypeInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
OutOfBandQueueTypeInfoNV & operator=( VkOutOfBandQueueTypeInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 OutOfBandQueueTypeInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 OutOfBandQueueTypeInfoNV & setQueueType( VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeNV queueType_ ) VULKAN_HPP_NOEXCEPT
{
queueType = queueType_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkOutOfBandQueueTypeInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkOutOfBandQueueTypeInfoNV*>( this );
}
operator VkOutOfBandQueueTypeInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkOutOfBandQueueTypeInfoNV*>( this );
}
operator VkOutOfBandQueueTypeInfoNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkOutOfBandQueueTypeInfoNV*>( this );
}
operator VkOutOfBandQueueTypeInfoNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkOutOfBandQueueTypeInfoNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeNV const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, queueType );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( OutOfBandQueueTypeInfoNV const & ) const = default;
#else
bool operator==( OutOfBandQueueTypeInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( queueType == rhs.queueType );
#endif
}
bool operator!=( OutOfBandQueueTypeInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eOutOfBandQueueTypeInfoNV;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeNV queueType = VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeNV::eRender;
};
template <>
struct CppType<StructureType, StructureType::eOutOfBandQueueTypeInfoNV>
{
using Type = OutOfBandQueueTypeInfoNV;
};
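  // Usage sketch (editorial addition): marking a queue as out-of-band for latency tracking
  // with VK_NV_low_latency2. Assumes `presentQueue` is a VULKAN_HPP_NAMESPACE::Queue used only
  // for presentation.
  //
  //   VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeInfoNV queueTypeInfo(
  //     VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeNV::ePresent );
  //   presentQueue.notifyOutOfBandNV( queueTypeInfo );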
// wrapper struct for struct VkPartitionedAccelerationStructureFlagsNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPartitionedAccelerationStructureFlagsNV.html
struct PartitionedAccelerationStructureFlagsNV
{
using NativeType = VkPartitionedAccelerationStructureFlagsNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePartitionedAccelerationStructureFlagsNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PartitionedAccelerationStructureFlagsNV(VULKAN_HPP_NAMESPACE::Bool32 enablePartitionTranslation_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, enablePartitionTranslation{ enablePartitionTranslation_ }
{}
VULKAN_HPP_CONSTEXPR PartitionedAccelerationStructureFlagsNV( PartitionedAccelerationStructureFlagsNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PartitionedAccelerationStructureFlagsNV( VkPartitionedAccelerationStructureFlagsNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PartitionedAccelerationStructureFlagsNV( *reinterpret_cast<PartitionedAccelerationStructureFlagsNV const *>( &rhs ) )
{}
PartitionedAccelerationStructureFlagsNV & operator=( PartitionedAccelerationStructureFlagsNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PartitionedAccelerationStructureFlagsNV & operator=( VkPartitionedAccelerationStructureFlagsNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureFlagsNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureFlagsNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureFlagsNV & setEnablePartitionTranslation( VULKAN_HPP_NAMESPACE::Bool32 enablePartitionTranslation_ ) VULKAN_HPP_NOEXCEPT
{
enablePartitionTranslation = enablePartitionTranslation_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPartitionedAccelerationStructureFlagsNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPartitionedAccelerationStructureFlagsNV*>( this );
}
operator VkPartitionedAccelerationStructureFlagsNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPartitionedAccelerationStructureFlagsNV*>( this );
}
operator VkPartitionedAccelerationStructureFlagsNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPartitionedAccelerationStructureFlagsNV*>( this );
}
operator VkPartitionedAccelerationStructureFlagsNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPartitionedAccelerationStructureFlagsNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, enablePartitionTranslation );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PartitionedAccelerationStructureFlagsNV const & ) const = default;
#else
bool operator==( PartitionedAccelerationStructureFlagsNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( enablePartitionTranslation == rhs.enablePartitionTranslation );
#endif
}
bool operator!=( PartitionedAccelerationStructureFlagsNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePartitionedAccelerationStructureFlagsNV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 enablePartitionTranslation = {};
};
template <>
struct CppType<StructureType, StructureType::ePartitionedAccelerationStructureFlagsNV>
{
using Type = PartitionedAccelerationStructureFlagsNV;
};
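// Usage sketch (editorial, not generated code): chaining this struct through a
// pNext opts a partitioned acceleration structure (VK_NV_partitioned_acceleration_structure)
// into per-partition translation; the parent structure named below is an assumption,
// check the extension spec for the exact valid usage.
//
//   VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureFlagsNV ptlasFlags{};
//   ptlasFlags.setEnablePartitionTranslation( VK_TRUE );
//   // hypothetical parent: PartitionedAccelerationStructureInstancesInputNV inputInfo;
//   // inputInfo.setPNext( &ptlasFlags );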
// wrapper struct for struct VkPartitionedAccelerationStructureUpdateInstanceDataNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPartitionedAccelerationStructureUpdateInstanceDataNV.html
struct PartitionedAccelerationStructureUpdateInstanceDataNV
{
using NativeType = VkPartitionedAccelerationStructureUpdateInstanceDataNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PartitionedAccelerationStructureUpdateInstanceDataNV(uint32_t instanceIndex_ = {}, uint32_t instanceContributionToHitGroupIndex_ = {}, VULKAN_HPP_NAMESPACE::DeviceAddress accelerationStructure_ = {}) VULKAN_HPP_NOEXCEPT
: instanceIndex{ instanceIndex_ }, instanceContributionToHitGroupIndex{ instanceContributionToHitGroupIndex_ }, accelerationStructure{ accelerationStructure_ }
{}
VULKAN_HPP_CONSTEXPR PartitionedAccelerationStructureUpdateInstanceDataNV( PartitionedAccelerationStructureUpdateInstanceDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PartitionedAccelerationStructureUpdateInstanceDataNV( VkPartitionedAccelerationStructureUpdateInstanceDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PartitionedAccelerationStructureUpdateInstanceDataNV( *reinterpret_cast<PartitionedAccelerationStructureUpdateInstanceDataNV const *>( &rhs ) )
{}
PartitionedAccelerationStructureUpdateInstanceDataNV & operator=( PartitionedAccelerationStructureUpdateInstanceDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PartitionedAccelerationStructureUpdateInstanceDataNV & operator=( VkPartitionedAccelerationStructureUpdateInstanceDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureUpdateInstanceDataNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureUpdateInstanceDataNV & setInstanceIndex( uint32_t instanceIndex_ ) VULKAN_HPP_NOEXCEPT
{
instanceIndex = instanceIndex_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureUpdateInstanceDataNV & setInstanceContributionToHitGroupIndex( uint32_t instanceContributionToHitGroupIndex_ ) VULKAN_HPP_NOEXCEPT
{
instanceContributionToHitGroupIndex = instanceContributionToHitGroupIndex_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureUpdateInstanceDataNV & setAccelerationStructure( VULKAN_HPP_NAMESPACE::DeviceAddress accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
{
accelerationStructure = accelerationStructure_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPartitionedAccelerationStructureUpdateInstanceDataNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPartitionedAccelerationStructureUpdateInstanceDataNV*>( this );
}
operator VkPartitionedAccelerationStructureUpdateInstanceDataNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPartitionedAccelerationStructureUpdateInstanceDataNV*>( this );
}
operator VkPartitionedAccelerationStructureUpdateInstanceDataNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPartitionedAccelerationStructureUpdateInstanceDataNV*>( this );
}
operator VkPartitionedAccelerationStructureUpdateInstanceDataNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPartitionedAccelerationStructureUpdateInstanceDataNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( instanceIndex, instanceContributionToHitGroupIndex, accelerationStructure );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PartitionedAccelerationStructureUpdateInstanceDataNV const & ) const = default;
#else
bool operator==( PartitionedAccelerationStructureUpdateInstanceDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( instanceIndex == rhs.instanceIndex )
&& ( instanceContributionToHitGroupIndex == rhs.instanceContributionToHitGroupIndex )
&& ( accelerationStructure == rhs.accelerationStructure );
#endif
}
bool operator!=( PartitionedAccelerationStructureUpdateInstanceDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
uint32_t instanceIndex = {};
uint32_t instanceContributionToHitGroupIndex = {};
VULKAN_HPP_NAMESPACE::DeviceAddress accelerationStructure = {};
};
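// Usage sketch (editorial, not generated code): update-instance records are plain
// PODs copied into a device-visible buffer consumed by the PTLAS indirect build;
// "blasAddress" is an assumed VULKAN_HPP_NAMESPACE::DeviceAddress obtained from
// vkGetAccelerationStructureDeviceAddressKHR. The fluent setters chain:
//
//   VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureUpdateInstanceDataNV update{};
//   update.setInstanceIndex( 42 ).setInstanceContributionToHitGroupIndex( 0 ).setAccelerationStructure( blasAddress );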
// wrapper struct for struct VkPartitionedAccelerationStructureWriteInstanceDataNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPartitionedAccelerationStructureWriteInstanceDataNV.html
struct PartitionedAccelerationStructureWriteInstanceDataNV
{
using NativeType = VkPartitionedAccelerationStructureWriteInstanceDataNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWriteInstanceDataNV(VULKAN_HPP_NAMESPACE::TransformMatrixKHR transform_ = {}, std::array<float,6> const & explicitAABB_ = {}, uint32_t instanceID_ = {}, uint32_t instanceMask_ = {}, uint32_t instanceContributionToHitGroupIndex_ = {}, VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureInstanceFlagsNV instanceFlags_ = {}, uint32_t instanceIndex_ = {}, uint32_t partitionIndex_ = {}, VULKAN_HPP_NAMESPACE::DeviceAddress accelerationStructure_ = {}) VULKAN_HPP_NOEXCEPT
: transform{ transform_ }, explicitAABB{ explicitAABB_ }, instanceID{ instanceID_ }, instanceMask{ instanceMask_ }, instanceContributionToHitGroupIndex{ instanceContributionToHitGroupIndex_ }, instanceFlags{ instanceFlags_ }, instanceIndex{ instanceIndex_ }, partitionIndex{ partitionIndex_ }, accelerationStructure{ accelerationStructure_ }
{}
VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWriteInstanceDataNV( PartitionedAccelerationStructureWriteInstanceDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PartitionedAccelerationStructureWriteInstanceDataNV( VkPartitionedAccelerationStructureWriteInstanceDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PartitionedAccelerationStructureWriteInstanceDataNV( *reinterpret_cast<PartitionedAccelerationStructureWriteInstanceDataNV const *>( &rhs ) )
{}
PartitionedAccelerationStructureWriteInstanceDataNV & operator=( PartitionedAccelerationStructureWriteInstanceDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PartitionedAccelerationStructureWriteInstanceDataNV & operator=( VkPartitionedAccelerationStructureWriteInstanceDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureWriteInstanceDataNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWriteInstanceDataNV & setTransform( VULKAN_HPP_NAMESPACE::TransformMatrixKHR const & transform_ ) VULKAN_HPP_NOEXCEPT
{
transform = transform_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWriteInstanceDataNV & setExplicitAABB( std::array<float,6> explicitAABB_ ) VULKAN_HPP_NOEXCEPT
{
explicitAABB = explicitAABB_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWriteInstanceDataNV & setInstanceID( uint32_t instanceID_ ) VULKAN_HPP_NOEXCEPT
{
instanceID = instanceID_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWriteInstanceDataNV & setInstanceMask( uint32_t instanceMask_ ) VULKAN_HPP_NOEXCEPT
{
instanceMask = instanceMask_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWriteInstanceDataNV & setInstanceContributionToHitGroupIndex( uint32_t instanceContributionToHitGroupIndex_ ) VULKAN_HPP_NOEXCEPT
{
instanceContributionToHitGroupIndex = instanceContributionToHitGroupIndex_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWriteInstanceDataNV & setInstanceFlags( VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureInstanceFlagsNV instanceFlags_ ) VULKAN_HPP_NOEXCEPT
{
instanceFlags = instanceFlags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWriteInstanceDataNV & setInstanceIndex( uint32_t instanceIndex_ ) VULKAN_HPP_NOEXCEPT
{
instanceIndex = instanceIndex_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWriteInstanceDataNV & setPartitionIndex( uint32_t partitionIndex_ ) VULKAN_HPP_NOEXCEPT
{
partitionIndex = partitionIndex_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWriteInstanceDataNV & setAccelerationStructure( VULKAN_HPP_NAMESPACE::DeviceAddress accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
{
accelerationStructure = accelerationStructure_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPartitionedAccelerationStructureWriteInstanceDataNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPartitionedAccelerationStructureWriteInstanceDataNV*>( this );
}
operator VkPartitionedAccelerationStructureWriteInstanceDataNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPartitionedAccelerationStructureWriteInstanceDataNV*>( this );
}
operator VkPartitionedAccelerationStructureWriteInstanceDataNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPartitionedAccelerationStructureWriteInstanceDataNV*>( this );
}
operator VkPartitionedAccelerationStructureWriteInstanceDataNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPartitionedAccelerationStructureWriteInstanceDataNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::TransformMatrixKHR const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 6> const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureInstanceFlagsNV const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( transform, explicitAABB, instanceID, instanceMask, instanceContributionToHitGroupIndex, instanceFlags, instanceIndex, partitionIndex, accelerationStructure );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PartitionedAccelerationStructureWriteInstanceDataNV const & ) const = default;
#else
bool operator==( PartitionedAccelerationStructureWriteInstanceDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( transform == rhs.transform )
&& ( explicitAABB == rhs.explicitAABB )
&& ( instanceID == rhs.instanceID )
&& ( instanceMask == rhs.instanceMask )
&& ( instanceContributionToHitGroupIndex == rhs.instanceContributionToHitGroupIndex )
&& ( instanceFlags == rhs.instanceFlags )
&& ( instanceIndex == rhs.instanceIndex )
&& ( partitionIndex == rhs.partitionIndex )
&& ( accelerationStructure == rhs.accelerationStructure );
#endif
}
bool operator!=( PartitionedAccelerationStructureWriteInstanceDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::TransformMatrixKHR transform = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 6> explicitAABB = {};
uint32_t instanceID = {};
uint32_t instanceMask = {};
uint32_t instanceContributionToHitGroupIndex = {};
VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureInstanceFlagsNV instanceFlags = {};
uint32_t instanceIndex = {};
uint32_t partitionIndex = {};
VULKAN_HPP_NAMESPACE::DeviceAddress accelerationStructure = {};
};
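// Usage sketch (editorial, not generated code): a write-instance record pairs a
// transform (or an explicit AABB) with instance metadata; instanceMask behaves
// like the TLAS instance mask in ray tracing (0xFF = visible to all rays).
// "blasAddress" is an assumed device address as above.
//
//   VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureWriteInstanceDataNV write{};
//   write.setInstanceID( 7 ).setInstanceMask( 0xFF ).setPartitionIndex( 0 ).setAccelerationStructure( blasAddress );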
// wrapper struct for struct VkPartitionedAccelerationStructureWritePartitionTranslationDataNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPartitionedAccelerationStructureWritePartitionTranslationDataNV.html
struct PartitionedAccelerationStructureWritePartitionTranslationDataNV
{
using NativeType = VkPartitionedAccelerationStructureWritePartitionTranslationDataNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWritePartitionTranslationDataNV(uint32_t partitionIndex_ = {}, std::array<float,3> const & partitionTranslation_ = {}) VULKAN_HPP_NOEXCEPT
: partitionIndex{ partitionIndex_ }, partitionTranslation{ partitionTranslation_ }
{}
VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWritePartitionTranslationDataNV( PartitionedAccelerationStructureWritePartitionTranslationDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PartitionedAccelerationStructureWritePartitionTranslationDataNV( VkPartitionedAccelerationStructureWritePartitionTranslationDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PartitionedAccelerationStructureWritePartitionTranslationDataNV( *reinterpret_cast<PartitionedAccelerationStructureWritePartitionTranslationDataNV const *>( &rhs ) )
{}
PartitionedAccelerationStructureWritePartitionTranslationDataNV & operator=( PartitionedAccelerationStructureWritePartitionTranslationDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PartitionedAccelerationStructureWritePartitionTranslationDataNV & operator=( VkPartitionedAccelerationStructureWritePartitionTranslationDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureWritePartitionTranslationDataNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWritePartitionTranslationDataNV & setPartitionIndex( uint32_t partitionIndex_ ) VULKAN_HPP_NOEXCEPT
{
partitionIndex = partitionIndex_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWritePartitionTranslationDataNV & setPartitionTranslation( std::array<float,3> partitionTranslation_ ) VULKAN_HPP_NOEXCEPT
{
partitionTranslation = partitionTranslation_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPartitionedAccelerationStructureWritePartitionTranslationDataNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPartitionedAccelerationStructureWritePartitionTranslationDataNV*>( this );
}
operator VkPartitionedAccelerationStructureWritePartitionTranslationDataNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPartitionedAccelerationStructureWritePartitionTranslationDataNV*>( this );
}
operator VkPartitionedAccelerationStructureWritePartitionTranslationDataNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPartitionedAccelerationStructureWritePartitionTranslationDataNV*>( this );
}
operator VkPartitionedAccelerationStructureWritePartitionTranslationDataNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPartitionedAccelerationStructureWritePartitionTranslationDataNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<uint32_t const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 3> const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( partitionIndex, partitionTranslation );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PartitionedAccelerationStructureWritePartitionTranslationDataNV const & ) const = default;
#else
bool operator==( PartitionedAccelerationStructureWritePartitionTranslationDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( partitionIndex == rhs.partitionIndex )
&& ( partitionTranslation == rhs.partitionTranslation );
#endif
}
bool operator!=( PartitionedAccelerationStructureWritePartitionTranslationDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
uint32_t partitionIndex = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 3> partitionTranslation = {};
};
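// Usage sketch (editorial, not generated code): partition translations are only
// honoured when enablePartitionTranslation was set via
// PartitionedAccelerationStructureFlagsNV (see above); the braces initialize the
// std::array<float,3> parameter of the setter.
//
//   VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureWritePartitionTranslationDataNV translation{};
//   translation.setPartitionIndex( 0 ).setPartitionTranslation( { { 10.0f, 0.0f, 0.0f } } );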
// wrapper struct for struct VkPastPresentationTimingGOOGLE, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPastPresentationTimingGOOGLE.html
struct PastPresentationTimingGOOGLE
{
using NativeType = VkPastPresentationTimingGOOGLE;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PastPresentationTimingGOOGLE(uint32_t presentID_ = {}, uint64_t desiredPresentTime_ = {}, uint64_t actualPresentTime_ = {}, uint64_t earliestPresentTime_ = {}, uint64_t presentMargin_ = {}) VULKAN_HPP_NOEXCEPT
: presentID{ presentID_ }, desiredPresentTime{ desiredPresentTime_ }, actualPresentTime{ actualPresentTime_ }, earliestPresentTime{ earliestPresentTime_ }, presentMargin{ presentMargin_ }
{}
VULKAN_HPP_CONSTEXPR PastPresentationTimingGOOGLE( PastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PastPresentationTimingGOOGLE( VkPastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
: PastPresentationTimingGOOGLE( *reinterpret_cast<PastPresentationTimingGOOGLE const *>( &rhs ) )
{}
PastPresentationTimingGOOGLE & operator=( PastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PastPresentationTimingGOOGLE & operator=( VkPastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE const *>( &rhs );
return *this;
}
operator VkPastPresentationTimingGOOGLE const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPastPresentationTimingGOOGLE*>( this );
}
operator VkPastPresentationTimingGOOGLE &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPastPresentationTimingGOOGLE*>( this );
}
operator VkPastPresentationTimingGOOGLE const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPastPresentationTimingGOOGLE*>( this );
}
operator VkPastPresentationTimingGOOGLE *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPastPresentationTimingGOOGLE*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<uint32_t const &, uint64_t const &, uint64_t const &, uint64_t const &, uint64_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( presentID, desiredPresentTime, actualPresentTime, earliestPresentTime, presentMargin );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PastPresentationTimingGOOGLE const & ) const = default;
#else
bool operator==( PastPresentationTimingGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( presentID == rhs.presentID )
&& ( desiredPresentTime == rhs.desiredPresentTime )
&& ( actualPresentTime == rhs.actualPresentTime )
&& ( earliestPresentTime == rhs.earliestPresentTime )
&& ( presentMargin == rhs.presentMargin );
#endif
}
bool operator!=( PastPresentationTimingGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
uint32_t presentID = {};
uint64_t desiredPresentTime = {};
uint64_t actualPresentTime = {};
uint64_t earliestPresentTime = {};
uint64_t presentMargin = {};
};
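// Usage sketch (editorial, not generated code): timings are read back with the
// usual two-call pattern of VK_GOOGLE_display_timing; "device" and "swapchain"
// are assumed caller-provided handles.
//
//   uint32_t count = 0;
//   vkGetPastPresentationTimingGOOGLE( device, swapchain, &count, nullptr );
//   std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE> timings( count );
//   vkGetPastPresentationTimingGOOGLE( device, swapchain, &count, reinterpret_cast<VkPastPresentationTimingGOOGLE *>( timings.data() ) );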
// wrapper struct for struct VkPerTileBeginInfoQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPerTileBeginInfoQCOM.html
struct PerTileBeginInfoQCOM
{
using NativeType = VkPerTileBeginInfoQCOM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerTileBeginInfoQCOM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PerTileBeginInfoQCOM(const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }
{}
VULKAN_HPP_CONSTEXPR PerTileBeginInfoQCOM( PerTileBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PerTileBeginInfoQCOM( VkPerTileBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
: PerTileBeginInfoQCOM( *reinterpret_cast<PerTileBeginInfoQCOM const *>( &rhs ) )
{}
PerTileBeginInfoQCOM & operator=( PerTileBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PerTileBeginInfoQCOM & operator=( VkPerTileBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerTileBeginInfoQCOM const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PerTileBeginInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPerTileBeginInfoQCOM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPerTileBeginInfoQCOM*>( this );
}
operator VkPerTileBeginInfoQCOM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPerTileBeginInfoQCOM*>( this );
}
operator VkPerTileBeginInfoQCOM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPerTileBeginInfoQCOM*>( this );
}
operator VkPerTileBeginInfoQCOM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPerTileBeginInfoQCOM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PerTileBeginInfoQCOM const & ) const = default;
#else
bool operator==( PerTileBeginInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext );
#endif
}
bool operator!=( PerTileBeginInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerTileBeginInfoQCOM;
const void * pNext = {};
};
template <>
struct CppType<StructureType, StructureType::ePerTileBeginInfoQCOM>
{
using Type = PerTileBeginInfoQCOM;
};
// wrapper struct for struct VkPerTileEndInfoQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPerTileEndInfoQCOM.html
struct PerTileEndInfoQCOM
{
using NativeType = VkPerTileEndInfoQCOM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerTileEndInfoQCOM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PerTileEndInfoQCOM(const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }
{}
VULKAN_HPP_CONSTEXPR PerTileEndInfoQCOM( PerTileEndInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PerTileEndInfoQCOM( VkPerTileEndInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
: PerTileEndInfoQCOM( *reinterpret_cast<PerTileEndInfoQCOM const *>( &rhs ) )
{}
PerTileEndInfoQCOM & operator=( PerTileEndInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PerTileEndInfoQCOM & operator=( VkPerTileEndInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerTileEndInfoQCOM const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PerTileEndInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPerTileEndInfoQCOM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPerTileEndInfoQCOM*>( this );
}
operator VkPerTileEndInfoQCOM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPerTileEndInfoQCOM*>( this );
}
operator VkPerTileEndInfoQCOM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPerTileEndInfoQCOM*>( this );
}
operator VkPerTileEndInfoQCOM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPerTileEndInfoQCOM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PerTileEndInfoQCOM const & ) const = default;
#else
bool operator==( PerTileEndInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext );
#endif
}
bool operator!=( PerTileEndInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerTileEndInfoQCOM;
const void * pNext = {};
};
template <>
struct CppType<StructureType, StructureType::ePerTileEndInfoQCOM>
{
using Type = PerTileEndInfoQCOM;
};
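// Usage sketch (editorial, not generated code): both per-tile info structs carry
// only sType/pNext today and serve as extensible brackets around per-tile work in
// VK_QCOM_tile_shading; the command names below are assumptions from that
// extension, verify against the spec. The wrappers convert implicitly to pointers.
//
//   VULKAN_HPP_NAMESPACE::PerTileBeginInfoQCOM beginInfo{};
//   VULKAN_HPP_NAMESPACE::PerTileEndInfoQCOM   endInfo{};
//   vkCmdBeginPerTileExecutionQCOM( commandBuffer, beginInfo );
//   // ... record per-tile commands ...
//   vkCmdEndPerTileExecutionQCOM( commandBuffer, endInfo );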
// wrapper struct for struct VkPerformanceConfigurationAcquireInfoINTEL, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPerformanceConfigurationAcquireInfoINTEL.html
struct PerformanceConfigurationAcquireInfoINTEL
{
using NativeType = VkPerformanceConfigurationAcquireInfoINTEL;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceConfigurationAcquireInfoINTEL;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PerformanceConfigurationAcquireInfoINTEL(VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, type{ type_ }
{}
VULKAN_HPP_CONSTEXPR PerformanceConfigurationAcquireInfoINTEL( PerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PerformanceConfigurationAcquireInfoINTEL( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
: PerformanceConfigurationAcquireInfoINTEL( *reinterpret_cast<PerformanceConfigurationAcquireInfoINTEL const *>( &rhs ) )
{}
PerformanceConfigurationAcquireInfoINTEL & operator=( PerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PerformanceConfigurationAcquireInfoINTEL & operator=( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PerformanceConfigurationAcquireInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PerformanceConfigurationAcquireInfoINTEL & setType( VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT
{
type = type_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPerformanceConfigurationAcquireInfoINTEL const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL*>( this );
}
operator VkPerformanceConfigurationAcquireInfoINTEL &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPerformanceConfigurationAcquireInfoINTEL*>( this );
}
operator VkPerformanceConfigurationAcquireInfoINTEL const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL*>( this );
}
operator VkPerformanceConfigurationAcquireInfoINTEL *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPerformanceConfigurationAcquireInfoINTEL*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, type );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PerformanceConfigurationAcquireInfoINTEL const & ) const = default;
#else
bool operator==( PerformanceConfigurationAcquireInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( type == rhs.type );
#endif
}
bool operator!=( PerformanceConfigurationAcquireInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceConfigurationAcquireInfoINTEL;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type = VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated;
};
template <>
struct CppType<StructureType, StructureType::ePerformanceConfigurationAcquireInfoINTEL>
{
using Type = PerformanceConfigurationAcquireInfoINTEL;
};
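// Usage sketch (editorial, not generated code): a configuration is acquired with
// vkAcquirePerformanceConfigurationINTEL and later bound to a queue with
// vkQueueSetPerformanceConfigurationINTEL; "device" is an assumed handle and
// error handling is elided.
//
//   VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL acquireInfo{};  // defaults to eCommandQueueMetricsDiscoveryActivated
//   VkPerformanceConfigurationINTEL configuration = VK_NULL_HANDLE;
//   vkAcquirePerformanceConfigurationINTEL( device, acquireInfo, &configuration );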
// wrapper struct for struct VkPerformanceCounterDescriptionKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPerformanceCounterDescriptionKHR.html
struct PerformanceCounterDescriptionKHR
{
using NativeType = VkPerformanceCounterDescriptionKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceCounterDescriptionKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PerformanceCounterDescriptionKHR(VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR flags_ = {}, std::array<char,VK_MAX_DESCRIPTION_SIZE> const & name_ = {}, std::array<char,VK_MAX_DESCRIPTION_SIZE> const & category_ = {}, std::array<char,VK_MAX_DESCRIPTION_SIZE> const & description_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, name{ name_ }, category{ category_ }, description{ description_ }
{}
VULKAN_HPP_CONSTEXPR_14 PerformanceCounterDescriptionKHR( PerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PerformanceCounterDescriptionKHR( VkPerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PerformanceCounterDescriptionKHR( *reinterpret_cast<PerformanceCounterDescriptionKHR const *>( &rhs ) )
{}
PerformanceCounterDescriptionKHR & operator=( PerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PerformanceCounterDescriptionKHR & operator=( VkPerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR const *>( &rhs );
return *this;
}
operator VkPerformanceCounterDescriptionKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPerformanceCounterDescriptionKHR*>( this );
}
operator VkPerformanceCounterDescriptionKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPerformanceCounterDescriptionKHR*>( this );
}
operator VkPerformanceCounterDescriptionKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPerformanceCounterDescriptionKHR*>( this );
}
operator VkPerformanceCounterDescriptionKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPerformanceCounterDescriptionKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, name, category, description );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
std::strong_ordering operator<=>( PerformanceCounterDescriptionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) return cmp;
if ( auto cmp = strcmp( name, rhs.name ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
if ( auto cmp = strcmp( category, rhs.category ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
if ( auto cmp = strcmp( description, rhs.description ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
return std::strong_ordering::equivalent;
}
#endif
bool operator==( PerformanceCounterDescriptionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( strcmp( name, rhs.name ) == 0 )
&& ( strcmp( category, rhs.category ) == 0 )
&& ( strcmp( description, rhs.description ) == 0 );
}
bool operator!=( PerformanceCounterDescriptionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceCounterDescriptionKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR flags = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> name = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> category = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
};
template <>
struct CppType<StructureType, StructureType::ePerformanceCounterDescriptionKHR>
{
using Type = PerformanceCounterDescriptionKHR;
};
// wrapper struct for struct VkPerformanceCounterKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPerformanceCounterKHR.html
struct PerformanceCounterKHR
{
using NativeType = VkPerformanceCounterKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceCounterKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PerformanceCounterKHR(VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR unit_ = VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR::eGeneric, VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR scope_ = VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR::eCommandBuffer, VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR storage_ = VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eInt32, std::array<uint8_t,VK_UUID_SIZE> const & uuid_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, unit{ unit_ }, scope{ scope_ }, storage{ storage_ }, uuid{ uuid_ }
{}
VULKAN_HPP_CONSTEXPR_14 PerformanceCounterKHR( PerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PerformanceCounterKHR( VkPerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PerformanceCounterKHR( *reinterpret_cast<PerformanceCounterKHR const *>( &rhs ) )
{}
PerformanceCounterKHR & operator=( PerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PerformanceCounterKHR & operator=( VkPerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR const *>( &rhs );
return *this;
}
operator VkPerformanceCounterKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPerformanceCounterKHR*>( this );
}
operator VkPerformanceCounterKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPerformanceCounterKHR*>( this );
}
operator VkPerformanceCounterKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPerformanceCounterKHR*>( this );
}
operator VkPerformanceCounterKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPerformanceCounterKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR const &, VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR const &, VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, unit, scope, storage, uuid );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PerformanceCounterKHR const & ) const = default;
#else
bool operator==( PerformanceCounterKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( unit == rhs.unit )
&& ( scope == rhs.scope )
&& ( storage == rhs.storage )
&& ( uuid == rhs.uuid );
#endif
}
bool operator!=( PerformanceCounterKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceCounterKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR unit = VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR::eGeneric;
VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR scope = VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR::eCommandBuffer;
VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR storage = VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eInt32;
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> uuid = {};
};
template <>
struct CppType<StructureType, StructureType::ePerformanceCounterKHR>
{
using Type = PerformanceCounterKHR;
};
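// Usage sketch (editorial, not generated code): counters and their descriptions
// are enumerated per queue family with the two-call pattern below; the arrays are
// parallel, so counters[i] is described by descriptions[i]. "physicalDevice" and
// "queueFamilyIndex" are assumed caller-provided.
//
//   uint32_t counterCount = 0;
//   vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( physicalDevice, queueFamilyIndex, &counterCount, nullptr, nullptr );
//   std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR>            counters( counterCount );
//   std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR> descriptions( counterCount );
//   vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( physicalDevice, queueFamilyIndex, &counterCount,
//     reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ),
//     reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( descriptions.data() ) );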
// wrapper union for union VkPerformanceCounterResultKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPerformanceCounterResultKHR.html
union PerformanceCounterResultKHR
{
using NativeType = VkPerformanceCounterResultKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( int32_t int32_ = {} )
: int32( int32_ )
{}
VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( int64_t int64_ )
: int64( int64_ )
{}
VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( uint32_t uint32_ )
: uint32( uint32_ )
{}
VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( uint64_t uint64_ )
: uint64( uint64_ )
{}
VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( float float32_ )
: float32( float32_ )
{}
VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( double float64_ )
: float64( float64_ )
{}
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setInt32( int32_t int32_ ) VULKAN_HPP_NOEXCEPT
{
int32 = int32_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setInt64( int64_t int64_ ) VULKAN_HPP_NOEXCEPT
{
int64 = int64_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setUint32( uint32_t uint32_ ) VULKAN_HPP_NOEXCEPT
{
uint32 = uint32_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setUint64( uint64_t uint64_ ) VULKAN_HPP_NOEXCEPT
{
uint64 = uint64_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setFloat32( float float32_ ) VULKAN_HPP_NOEXCEPT
{
float32 = float32_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setFloat64( double float64_ ) VULKAN_HPP_NOEXCEPT
{
float64 = float64_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPerformanceCounterResultKHR const &() const
{
return *reinterpret_cast<const VkPerformanceCounterResultKHR*>( this );
}
operator VkPerformanceCounterResultKHR &()
{
return *reinterpret_cast<VkPerformanceCounterResultKHR*>( this );
}
int32_t int32;
int64_t int64;
uint32_t uint32;
uint64_t uint64;
float float32;
double float64;
};
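// Usage sketch (editorial, not generated code): the active member of the union is
// selected by the storage field of the matching PerformanceCounterKHR, e.g.:
//
//   double asDouble( VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR storage,
//                    VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR const & result )
//   {
//     switch ( storage )
//     {
//       case VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eInt32:   return static_cast<double>( result.int32 );
//       case VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eInt64:   return static_cast<double>( result.int64 );
//       case VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eUint32:  return static_cast<double>( result.uint32 );
//       case VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eUint64:  return static_cast<double>( result.uint64 );
//       case VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eFloat32: return static_cast<double>( result.float32 );
//       default:                                                           return result.float64;
//     }
//   }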
// wrapper struct for struct VkPerformanceMarkerInfoINTEL, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPerformanceMarkerInfoINTEL.html
struct PerformanceMarkerInfoINTEL
{
using NativeType = VkPerformanceMarkerInfoINTEL;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceMarkerInfoINTEL;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PerformanceMarkerInfoINTEL(uint64_t marker_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, marker{ marker_ }
{}
VULKAN_HPP_CONSTEXPR PerformanceMarkerInfoINTEL( PerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PerformanceMarkerInfoINTEL( VkPerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
: PerformanceMarkerInfoINTEL( *reinterpret_cast<PerformanceMarkerInfoINTEL const *>( &rhs ) )
{}
PerformanceMarkerInfoINTEL & operator=( PerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PerformanceMarkerInfoINTEL & operator=( VkPerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PerformanceMarkerInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PerformanceMarkerInfoINTEL & setMarker( uint64_t marker_ ) VULKAN_HPP_NOEXCEPT
{
marker = marker_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPerformanceMarkerInfoINTEL const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPerformanceMarkerInfoINTEL*>( this );
}
operator VkPerformanceMarkerInfoINTEL &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPerformanceMarkerInfoINTEL*>( this );
}
operator VkPerformanceMarkerInfoINTEL const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPerformanceMarkerInfoINTEL*>( this );
}
operator VkPerformanceMarkerInfoINTEL *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPerformanceMarkerInfoINTEL*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint64_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, marker );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PerformanceMarkerInfoINTEL const & ) const = default;
#else
bool operator==( PerformanceMarkerInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( marker == rhs.marker );
#endif
}
bool operator!=( PerformanceMarkerInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceMarkerInfoINTEL;
const void * pNext = {};
uint64_t marker = {};
};
template <>
struct CppType<StructureType, StructureType::ePerformanceMarkerInfoINTEL>
{
using Type = PerformanceMarkerInfoINTEL;
};
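// Usage sketch (editorial, not generated code): recorded with
// vkCmdSetPerformanceMarkerINTEL so the 64-bit marker shows up in the metrics
// stream; "commandBuffer" and "frameIndex" are assumed caller-provided.
//
//   VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL markerInfo{};
//   markerInfo.setMarker( frameIndex );
//   vkCmdSetPerformanceMarkerINTEL( commandBuffer, markerInfo );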
// wrapper struct for struct VkPerformanceOverrideInfoINTEL, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPerformanceOverrideInfoINTEL.html
struct PerformanceOverrideInfoINTEL
{
using NativeType = VkPerformanceOverrideInfoINTEL;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceOverrideInfoINTEL;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PerformanceOverrideInfoINTEL(VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL::eNullHardware, VULKAN_HPP_NAMESPACE::Bool32 enable_ = {}, uint64_t parameter_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, type{ type_ }, enable{ enable_ }, parameter{ parameter_ }
{}
VULKAN_HPP_CONSTEXPR PerformanceOverrideInfoINTEL( PerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PerformanceOverrideInfoINTEL( VkPerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
: PerformanceOverrideInfoINTEL( *reinterpret_cast<PerformanceOverrideInfoINTEL const *>( &rhs ) )
{}
PerformanceOverrideInfoINTEL & operator=( PerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PerformanceOverrideInfoINTEL & operator=( VkPerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PerformanceOverrideInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PerformanceOverrideInfoINTEL & setType( VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT
{
type = type_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PerformanceOverrideInfoINTEL & setEnable( VULKAN_HPP_NAMESPACE::Bool32 enable_ ) VULKAN_HPP_NOEXCEPT
{
enable = enable_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PerformanceOverrideInfoINTEL & setParameter( uint64_t parameter_ ) VULKAN_HPP_NOEXCEPT
{
parameter = parameter_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPerformanceOverrideInfoINTEL const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPerformanceOverrideInfoINTEL*>( this );
}
operator VkPerformanceOverrideInfoINTEL &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPerformanceOverrideInfoINTEL*>( this );
}
operator VkPerformanceOverrideInfoINTEL const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPerformanceOverrideInfoINTEL*>( this );
}
operator VkPerformanceOverrideInfoINTEL *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPerformanceOverrideInfoINTEL*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint64_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, type, enable, parameter );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PerformanceOverrideInfoINTEL const & ) const = default;
#else
bool operator==( PerformanceOverrideInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( type == rhs.type )
&& ( enable == rhs.enable )
&& ( parameter == rhs.parameter );
#endif
}
bool operator!=( PerformanceOverrideInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceOverrideInfoINTEL;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type = VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL::eNullHardware;
VULKAN_HPP_NAMESPACE::Bool32 enable = {};
uint64_t parameter = {};
};
template <>
struct CppType<StructureType, StructureType::ePerformanceOverrideInfoINTEL>
{
using Type = PerformanceOverrideInfoINTEL;
};
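// Usage sketch (editorial, not generated code): an override is toggled per command
// buffer with vkCmdSetPerformanceOverrideINTEL; "commandBuffer" is an assumed handle.
//
//   VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL overrideInfo{};
//   overrideInfo.setType( VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL::eFlushGpuCaches ).setEnable( VK_TRUE );
//   vkCmdSetPerformanceOverrideINTEL( commandBuffer, overrideInfo );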
// wrapper struct for struct VkPerformanceQuerySubmitInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPerformanceQuerySubmitInfoKHR.html
struct PerformanceQuerySubmitInfoKHR
{
using NativeType = VkPerformanceQuerySubmitInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceQuerySubmitInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PerformanceQuerySubmitInfoKHR(uint32_t counterPassIndex_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, counterPassIndex{ counterPassIndex_ }
{}
VULKAN_HPP_CONSTEXPR PerformanceQuerySubmitInfoKHR( PerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PerformanceQuerySubmitInfoKHR( VkPerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PerformanceQuerySubmitInfoKHR( *reinterpret_cast<PerformanceQuerySubmitInfoKHR const *>( &rhs ) )
{}
PerformanceQuerySubmitInfoKHR & operator=( PerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PerformanceQuerySubmitInfoKHR & operator=( VkPerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PerformanceQuerySubmitInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PerformanceQuerySubmitInfoKHR & setCounterPassIndex( uint32_t counterPassIndex_ ) VULKAN_HPP_NOEXCEPT
{
counterPassIndex = counterPassIndex_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPerformanceQuerySubmitInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPerformanceQuerySubmitInfoKHR*>( this );
}
operator VkPerformanceQuerySubmitInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPerformanceQuerySubmitInfoKHR*>( this );
}
operator VkPerformanceQuerySubmitInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPerformanceQuerySubmitInfoKHR*>( this );
}
operator VkPerformanceQuerySubmitInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPerformanceQuerySubmitInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, counterPassIndex );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PerformanceQuerySubmitInfoKHR const & ) const = default;
#else
bool operator==( PerformanceQuerySubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( counterPassIndex == rhs.counterPassIndex );
#endif
}
bool operator!=( PerformanceQuerySubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceQuerySubmitInfoKHR;
const void * pNext = {};
uint32_t counterPassIndex = {};
};
template <>
struct CppType<StructureType, StructureType::ePerformanceQuerySubmitInfoKHR>
{
using Type = PerformanceQuerySubmitInfoKHR;
};
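// Usage sketch (editorial, not generated code): chained into VkSubmitInfo::pNext to
// select the counter pass for one submission; the number of passes required for a
// counter selection comes from vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR.
// "pass" is an assumed loop index over that count.
//
//   VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR perfSubmit{};
//   perfSubmit.setCounterPassIndex( pass );
//   VULKAN_HPP_NAMESPACE::SubmitInfo submitInfo{};
//   submitInfo.setPNext( &perfSubmit );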
// wrapper struct for struct VkPerformanceStreamMarkerInfoINTEL, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPerformanceStreamMarkerInfoINTEL.html
struct PerformanceStreamMarkerInfoINTEL
{
using NativeType = VkPerformanceStreamMarkerInfoINTEL;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceStreamMarkerInfoINTEL;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PerformanceStreamMarkerInfoINTEL(uint32_t marker_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, marker{ marker_ }
{}
VULKAN_HPP_CONSTEXPR PerformanceStreamMarkerInfoINTEL( PerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PerformanceStreamMarkerInfoINTEL( VkPerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
: PerformanceStreamMarkerInfoINTEL( *reinterpret_cast<PerformanceStreamMarkerInfoINTEL const *>( &rhs ) )
{}
PerformanceStreamMarkerInfoINTEL & operator=( PerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PerformanceStreamMarkerInfoINTEL & operator=( VkPerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PerformanceStreamMarkerInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PerformanceStreamMarkerInfoINTEL & setMarker( uint32_t marker_ ) VULKAN_HPP_NOEXCEPT
{
marker = marker_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPerformanceStreamMarkerInfoINTEL const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL*>( this );
}
operator VkPerformanceStreamMarkerInfoINTEL &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPerformanceStreamMarkerInfoINTEL*>( this );
}
operator VkPerformanceStreamMarkerInfoINTEL const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL*>( this );
}
operator VkPerformanceStreamMarkerInfoINTEL *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPerformanceStreamMarkerInfoINTEL*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, marker );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PerformanceStreamMarkerInfoINTEL const & ) const = default;
#else
bool operator==( PerformanceStreamMarkerInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( marker == rhs.marker );
#endif
}
bool operator!=( PerformanceStreamMarkerInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceStreamMarkerInfoINTEL;
const void * pNext = {};
uint32_t marker = {};
};
template <>
struct CppType<StructureType, StructureType::ePerformanceStreamMarkerInfoINTEL>
{
using Type = PerformanceStreamMarkerInfoINTEL;
};
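// Usage sketch (editorial addition, not generated): writing an application-defined stream
// marker from VK_INTEL_performance_query into a command buffer; `commandBuffer` is assumed
// to be a valid VULKAN_HPP_NAMESPACE::CommandBuffer in the recording state.
//   VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL markerInfo;
//   markerInfo.setMarker( 42 );  // arbitrary application-chosen value
//   commandBuffer.setPerformanceStreamMarkerINTEL( markerInfo );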
// wrapper union for union VkPerformanceValueDataINTEL, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPerformanceValueDataINTEL.html
union PerformanceValueDataINTEL
{
using NativeType = VkPerformanceValueDataINTEL;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL( uint32_t value32_ = {} )
: value32( value32_ )
{}
VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL( uint64_t value64_ )
: value64( value64_ )
{}
VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL( float valueFloat_ )
: valueFloat( valueFloat_ )
{}
VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL( const char * valueString_ )
: valueString( valueString_ )
{}
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL & setValue32( uint32_t value32_ ) VULKAN_HPP_NOEXCEPT
{
value32 = value32_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL & setValue64( uint64_t value64_ ) VULKAN_HPP_NOEXCEPT
{
value64 = value64_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL & setValueFloat( float valueFloat_ ) VULKAN_HPP_NOEXCEPT
{
valueFloat = valueFloat_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL & setValueBool( VULKAN_HPP_NAMESPACE::Bool32 valueBool_ ) VULKAN_HPP_NOEXCEPT
{
valueBool = valueBool_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL & setValueString( const char * valueString_ ) VULKAN_HPP_NOEXCEPT
{
valueString = valueString_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPerformanceValueDataINTEL const &() const
{
return *reinterpret_cast<const VkPerformanceValueDataINTEL*>( this );
}
operator VkPerformanceValueDataINTEL &()
{
return *reinterpret_cast<VkPerformanceValueDataINTEL*>( this );
}
#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
uint32_t value32;
uint64_t value64;
float valueFloat;
VULKAN_HPP_NAMESPACE::Bool32 valueBool;
const char * valueString;
#else
uint32_t value32;
uint64_t value64;
float valueFloat;
VkBool32 valueBool;
const char * valueString;
#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
};
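// Note (editorial addition): as with any C++ union, only the member most recently written
// may be read. For values produced by VK_INTEL_performance_query, the active member is
// indicated out-of-band by PerformanceValueINTEL::type (see the struct below).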
// wrapper struct for struct VkPerformanceValueINTEL, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPerformanceValueINTEL.html
struct PerformanceValueINTEL
{
using NativeType = VkPerformanceValueINTEL;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PerformanceValueINTEL(VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL::eUint32, VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL data_ = {}) VULKAN_HPP_NOEXCEPT
: type{ type_ }, data{ data_ }
{}
VULKAN_HPP_CONSTEXPR_14 PerformanceValueINTEL( PerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PerformanceValueINTEL( VkPerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
: PerformanceValueINTEL( *reinterpret_cast<PerformanceValueINTEL const *>( &rhs ) )
{}
PerformanceValueINTEL & operator=( PerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PerformanceValueINTEL & operator=( VkPerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceValueINTEL const *>( &rhs );
return *this;
}
operator VkPerformanceValueINTEL const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPerformanceValueINTEL*>( this );
}
operator VkPerformanceValueINTEL &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPerformanceValueINTEL*>( this );
}
operator VkPerformanceValueINTEL const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPerformanceValueINTEL*>( this );
}
operator VkPerformanceValueINTEL *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPerformanceValueINTEL*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL const &, VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( type, data );
}
#endif
public:
VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type = VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL::eUint32;
VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL data = {};
};
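// Usage sketch (editorial addition, not generated): querying a performance parameter and
// dispatching on `type` to pick the active union member; `device` is assumed to be a valid
// VULKAN_HPP_NAMESPACE::Device with VK_INTEL_performance_query enabled.
//   VULKAN_HPP_NAMESPACE::PerformanceValueINTEL value =
//     device.getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL::eHwCountersSupported );
//   if ( value.type == VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL::eBool )
//   {
//     bool supported = ( value.data.valueBool != 0 );
//   }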
// wrapper struct for struct VkPhysicalDevice16BitStorageFeatures, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevice16BitStorageFeatures.html
struct PhysicalDevice16BitStorageFeatures
{
using NativeType = VkPhysicalDevice16BitStorageFeatures;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevice16BitStorageFeatures;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDevice16BitStorageFeatures(VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, storageBuffer16BitAccess{ storageBuffer16BitAccess_ }, uniformAndStorageBuffer16BitAccess{ uniformAndStorageBuffer16BitAccess_ }, storagePushConstant16{ storagePushConstant16_ }, storageInputOutput16{ storageInputOutput16_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDevice16BitStorageFeatures( PhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDevice16BitStorageFeatures( VkPhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDevice16BitStorageFeatures( *reinterpret_cast<PhysicalDevice16BitStorageFeatures const *>( &rhs ) )
{}
PhysicalDevice16BitStorageFeatures & operator=( PhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDevice16BitStorageFeatures & operator=( VkPhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures & setStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT
{
storageBuffer16BitAccess = storageBuffer16BitAccess_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures & setUniformAndStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT
{
uniformAndStorageBuffer16BitAccess = uniformAndStorageBuffer16BitAccess_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures & setStoragePushConstant16( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ ) VULKAN_HPP_NOEXCEPT
{
storagePushConstant16 = storagePushConstant16_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures & setStorageInputOutput16( VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ ) VULKAN_HPP_NOEXCEPT
{
storageInputOutput16 = storageInputOutput16_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDevice16BitStorageFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDevice16BitStorageFeatures*>( this );
}
operator VkPhysicalDevice16BitStorageFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDevice16BitStorageFeatures*>( this );
}
operator VkPhysicalDevice16BitStorageFeatures const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDevice16BitStorageFeatures*>( this );
}
operator VkPhysicalDevice16BitStorageFeatures *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDevice16BitStorageFeatures*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, storageBuffer16BitAccess, uniformAndStorageBuffer16BitAccess, storagePushConstant16, storageInputOutput16 );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDevice16BitStorageFeatures const & ) const = default;
#else
bool operator==( PhysicalDevice16BitStorageFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( storageBuffer16BitAccess == rhs.storageBuffer16BitAccess )
&& ( uniformAndStorageBuffer16BitAccess == rhs.uniformAndStorageBuffer16BitAccess )
&& ( storagePushConstant16 == rhs.storagePushConstant16 )
&& ( storageInputOutput16 == rhs.storageInputOutput16 );
#endif
}
bool operator!=( PhysicalDevice16BitStorageFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevice16BitStorageFeatures;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess = {};
VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess = {};
VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16 = {};
VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16 = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDevice16BitStorageFeatures>
{
using Type = PhysicalDevice16BitStorageFeatures;
};
using PhysicalDevice16BitStorageFeaturesKHR = PhysicalDevice16BitStorageFeatures;
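// Usage sketch (editorial addition, not generated): feature structs like this one are
// typically queried through a StructureChain; `physicalDevice` is assumed to be a valid
// VULKAN_HPP_NAMESPACE::PhysicalDevice on Vulkan 1.1 or later.
//   auto chain = physicalDevice.getFeatures2<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2,
//                                            VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures>();
//   auto & storage16 = chain.get<VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures>();
//   bool   hasSSBO16 = ( storage16.storageBuffer16BitAccess != 0 );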
// wrapper struct for struct VkPhysicalDevice4444FormatsFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevice4444FormatsFeaturesEXT.html
struct PhysicalDevice4444FormatsFeaturesEXT
{
using NativeType = VkPhysicalDevice4444FormatsFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevice4444FormatsFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDevice4444FormatsFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 formatA4R4G4B4_ = {}, VULKAN_HPP_NAMESPACE::Bool32 formatA4B4G4R4_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, formatA4R4G4B4{ formatA4R4G4B4_ }, formatA4B4G4R4{ formatA4B4G4R4_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDevice4444FormatsFeaturesEXT( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDevice4444FormatsFeaturesEXT( VkPhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDevice4444FormatsFeaturesEXT( *reinterpret_cast<PhysicalDevice4444FormatsFeaturesEXT const *>( &rhs ) )
{}
PhysicalDevice4444FormatsFeaturesEXT & operator=( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDevice4444FormatsFeaturesEXT & operator=( VkPhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevice4444FormatsFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDevice4444FormatsFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevice4444FormatsFeaturesEXT & setFormatA4R4G4B4( VULKAN_HPP_NAMESPACE::Bool32 formatA4R4G4B4_ ) VULKAN_HPP_NOEXCEPT
{
formatA4R4G4B4 = formatA4R4G4B4_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevice4444FormatsFeaturesEXT & setFormatA4B4G4R4( VULKAN_HPP_NAMESPACE::Bool32 formatA4B4G4R4_ ) VULKAN_HPP_NOEXCEPT
{
formatA4B4G4R4 = formatA4B4G4R4_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDevice4444FormatsFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDevice4444FormatsFeaturesEXT*>( this );
}
operator VkPhysicalDevice4444FormatsFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDevice4444FormatsFeaturesEXT*>( this );
}
operator VkPhysicalDevice4444FormatsFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDevice4444FormatsFeaturesEXT*>( this );
}
operator VkPhysicalDevice4444FormatsFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDevice4444FormatsFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, formatA4R4G4B4, formatA4B4G4R4 );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDevice4444FormatsFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( formatA4R4G4B4 == rhs.formatA4R4G4B4 )
&& ( formatA4B4G4R4 == rhs.formatA4B4G4R4 );
#endif
}
bool operator!=( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevice4444FormatsFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 formatA4R4G4B4 = {};
VULKAN_HPP_NAMESPACE::Bool32 formatA4B4G4R4 = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDevice4444FormatsFeaturesEXT>
{
using Type = PhysicalDevice4444FormatsFeaturesEXT;
};
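// Usage sketch (editorial addition, not generated): the manual, non-StructureChain way of
// chaining a feature query, shown once here; the same pNext pattern applies to every
// extensible feature struct in this header. `physicalDevice` is assumed valid and
// VK_EXT_4444_formats (or Vulkan 1.3) available.
//   VULKAN_HPP_NAMESPACE::PhysicalDevice4444FormatsFeaturesEXT formats4444;
//   VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features2;
//   features2.pNext = &formats4444;
//   physicalDevice.getFeatures2( &features2 );
//   // formats4444.formatA4R4G4B4 / formats4444.formatA4B4G4R4 now report support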
// wrapper struct for struct VkPhysicalDevice8BitStorageFeatures, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevice8BitStorageFeatures.html
struct PhysicalDevice8BitStorageFeatures
{
using NativeType = VkPhysicalDevice8BitStorageFeatures;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevice8BitStorageFeatures;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDevice8BitStorageFeatures(VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, storageBuffer8BitAccess{ storageBuffer8BitAccess_ }, uniformAndStorageBuffer8BitAccess{ uniformAndStorageBuffer8BitAccess_ }, storagePushConstant8{ storagePushConstant8_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDevice8BitStorageFeatures( PhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDevice8BitStorageFeatures( VkPhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDevice8BitStorageFeatures( *reinterpret_cast<PhysicalDevice8BitStorageFeatures const *>( &rhs ) )
{}
PhysicalDevice8BitStorageFeatures & operator=( PhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDevice8BitStorageFeatures & operator=( VkPhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeatures const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDevice8BitStorageFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevice8BitStorageFeatures & setStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT
{
storageBuffer8BitAccess = storageBuffer8BitAccess_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevice8BitStorageFeatures & setUniformAndStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT
{
uniformAndStorageBuffer8BitAccess = uniformAndStorageBuffer8BitAccess_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevice8BitStorageFeatures & setStoragePushConstant8( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ ) VULKAN_HPP_NOEXCEPT
{
storagePushConstant8 = storagePushConstant8_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDevice8BitStorageFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDevice8BitStorageFeatures*>( this );
}
operator VkPhysicalDevice8BitStorageFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDevice8BitStorageFeatures*>( this );
}
operator VkPhysicalDevice8BitStorageFeatures const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDevice8BitStorageFeatures*>( this );
}
operator VkPhysicalDevice8BitStorageFeatures *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDevice8BitStorageFeatures*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, storageBuffer8BitAccess, uniformAndStorageBuffer8BitAccess, storagePushConstant8 );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDevice8BitStorageFeatures const & ) const = default;
#else
bool operator==( PhysicalDevice8BitStorageFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( storageBuffer8BitAccess == rhs.storageBuffer8BitAccess )
&& ( uniformAndStorageBuffer8BitAccess == rhs.uniformAndStorageBuffer8BitAccess )
&& ( storagePushConstant8 == rhs.storagePushConstant8 );
#endif
}
bool operator!=( PhysicalDevice8BitStorageFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevice8BitStorageFeatures;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess = {};
VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess = {};
VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8 = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDevice8BitStorageFeatures>
{
using Type = PhysicalDevice8BitStorageFeatures;
};
using PhysicalDevice8BitStorageFeaturesKHR = PhysicalDevice8BitStorageFeatures;
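// Usage sketch (editorial addition, not generated): enabling a queried feature at device
// creation by chaining the filled-in struct into DeviceCreateInfo::pNext; `physicalDevice`
// and `queueCreateInfo` (a VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo) are assumed to be
// set up elsewhere.
//   VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeatures storage8;
//   storage8.setStorageBuffer8BitAccess( VK_TRUE );
//   VULKAN_HPP_NAMESPACE::DeviceCreateInfo createInfo;
//   createInfo.setQueueCreateInfos( queueCreateInfo ).setPNext( &storage8 );
//   VULKAN_HPP_NAMESPACE::Device device = physicalDevice.createDevice( createInfo );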
// wrapper struct for struct VkPhysicalDeviceASTCDecodeFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceASTCDecodeFeaturesEXT.html
struct PhysicalDeviceASTCDecodeFeaturesEXT
{
using NativeType = VkPhysicalDeviceASTCDecodeFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceASTCDecodeFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, decodeModeSharedExponent{ decodeModeSharedExponent_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceASTCDecodeFeaturesEXT( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceASTCDecodeFeaturesEXT( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceASTCDecodeFeaturesEXT( *reinterpret_cast<PhysicalDeviceASTCDecodeFeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceASTCDecodeFeaturesEXT & operator=( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceASTCDecodeFeaturesEXT & operator=( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceASTCDecodeFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceASTCDecodeFeaturesEXT & setDecodeModeSharedExponent( VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent_ ) VULKAN_HPP_NOEXCEPT
{
decodeModeSharedExponent = decodeModeSharedExponent_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceASTCDecodeFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceASTCDecodeFeaturesEXT*>( this );
}
operator VkPhysicalDeviceASTCDecodeFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceASTCDecodeFeaturesEXT*>( this );
}
operator VkPhysicalDeviceASTCDecodeFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceASTCDecodeFeaturesEXT*>( this );
}
operator VkPhysicalDeviceASTCDecodeFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceASTCDecodeFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, decodeModeSharedExponent );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceASTCDecodeFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( decodeModeSharedExponent == rhs.decodeModeSharedExponent );
#endif
}
bool operator!=( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT>
{
using Type = PhysicalDeviceASTCDecodeFeaturesEXT;
};
// wrapper struct for struct VkPhysicalDeviceAccelerationStructureFeaturesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceAccelerationStructureFeaturesKHR.html
struct PhysicalDeviceAccelerationStructureFeaturesKHR
{
using NativeType = VkPhysicalDeviceAccelerationStructureFeaturesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAccelerationStructureFeaturesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceAccelerationStructureFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 accelerationStructure_ = {}, VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureCaptureReplay_ = {}, VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureIndirectBuild_ = {}, VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureHostCommands_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingAccelerationStructureUpdateAfterBind_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, accelerationStructure{ accelerationStructure_ }, accelerationStructureCaptureReplay{ accelerationStructureCaptureReplay_ }, accelerationStructureIndirectBuild{ accelerationStructureIndirectBuild_ }, accelerationStructureHostCommands{ accelerationStructureHostCommands_ }, descriptorBindingAccelerationStructureUpdateAfterBind{ descriptorBindingAccelerationStructureUpdateAfterBind_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceAccelerationStructureFeaturesKHR( PhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceAccelerationStructureFeaturesKHR( VkPhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceAccelerationStructureFeaturesKHR( *reinterpret_cast<PhysicalDeviceAccelerationStructureFeaturesKHR const *>( &rhs ) )
{}
PhysicalDeviceAccelerationStructureFeaturesKHR & operator=( PhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceAccelerationStructureFeaturesKHR & operator=( VkPhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructureFeaturesKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & setAccelerationStructure( VULKAN_HPP_NAMESPACE::Bool32 accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
{
accelerationStructure = accelerationStructure_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & setAccelerationStructureCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
{
accelerationStructureCaptureReplay = accelerationStructureCaptureReplay_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & setAccelerationStructureIndirectBuild( VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureIndirectBuild_ ) VULKAN_HPP_NOEXCEPT
{
accelerationStructureIndirectBuild = accelerationStructureIndirectBuild_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & setAccelerationStructureHostCommands( VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureHostCommands_ ) VULKAN_HPP_NOEXCEPT
{
accelerationStructureHostCommands = accelerationStructureHostCommands_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & setDescriptorBindingAccelerationStructureUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingAccelerationStructureUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBindingAccelerationStructureUpdateAfterBind = descriptorBindingAccelerationStructureUpdateAfterBind_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceAccelerationStructureFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceAccelerationStructureFeaturesKHR*>( this );
}
operator VkPhysicalDeviceAccelerationStructureFeaturesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceAccelerationStructureFeaturesKHR*>( this );
}
operator VkPhysicalDeviceAccelerationStructureFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceAccelerationStructureFeaturesKHR*>( this );
}
operator VkPhysicalDeviceAccelerationStructureFeaturesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceAccelerationStructureFeaturesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, accelerationStructure, accelerationStructureCaptureReplay, accelerationStructureIndirectBuild, accelerationStructureHostCommands, descriptorBindingAccelerationStructureUpdateAfterBind );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceAccelerationStructureFeaturesKHR const & ) const = default;
#else
bool operator==( PhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( accelerationStructure == rhs.accelerationStructure )
&& ( accelerationStructureCaptureReplay == rhs.accelerationStructureCaptureReplay )
&& ( accelerationStructureIndirectBuild == rhs.accelerationStructureIndirectBuild )
&& ( accelerationStructureHostCommands == rhs.accelerationStructureHostCommands )
&& ( descriptorBindingAccelerationStructureUpdateAfterBind == rhs.descriptorBindingAccelerationStructureUpdateAfterBind );
#endif
}
bool operator!=( PhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAccelerationStructureFeaturesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 accelerationStructure = {};
VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureCaptureReplay = {};
VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureIndirectBuild = {};
VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureHostCommands = {};
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingAccelerationStructureUpdateAfterBind = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceAccelerationStructureFeaturesKHR>
{
using Type = PhysicalDeviceAccelerationStructureFeaturesKHR;
};
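// Usage sketch (editorial addition, not generated): requesting ray-tracing acceleration
// structures at device creation with the fluent setters; VK_KHR_acceleration_structure and
// its dependencies must also be listed as enabled extensions.
//   VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructureFeaturesKHR asFeatures;
//   asFeatures.setAccelerationStructure( VK_TRUE )
//             .setDescriptorBindingAccelerationStructureUpdateAfterBind( VK_TRUE );
//   // chain &asFeatures into VULKAN_HPP_NAMESPACE::DeviceCreateInfo::pNext as shown above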
// wrapper struct for struct VkPhysicalDeviceAccelerationStructurePropertiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceAccelerationStructurePropertiesKHR.html
struct PhysicalDeviceAccelerationStructurePropertiesKHR
{
using NativeType = VkPhysicalDeviceAccelerationStructurePropertiesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAccelerationStructurePropertiesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceAccelerationStructurePropertiesKHR(uint64_t maxGeometryCount_ = {}, uint64_t maxInstanceCount_ = {}, uint64_t maxPrimitiveCount_ = {}, uint32_t maxPerStageDescriptorAccelerationStructures_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindAccelerationStructures_ = {}, uint32_t maxDescriptorSetAccelerationStructures_ = {}, uint32_t maxDescriptorSetUpdateAfterBindAccelerationStructures_ = {}, uint32_t minAccelerationStructureScratchOffsetAlignment_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, maxGeometryCount{ maxGeometryCount_ }, maxInstanceCount{ maxInstanceCount_ }, maxPrimitiveCount{ maxPrimitiveCount_ }, maxPerStageDescriptorAccelerationStructures{ maxPerStageDescriptorAccelerationStructures_ }, maxPerStageDescriptorUpdateAfterBindAccelerationStructures{ maxPerStageDescriptorUpdateAfterBindAccelerationStructures_ }, maxDescriptorSetAccelerationStructures{ maxDescriptorSetAccelerationStructures_ }, maxDescriptorSetUpdateAfterBindAccelerationStructures{ maxDescriptorSetUpdateAfterBindAccelerationStructures_ }, minAccelerationStructureScratchOffsetAlignment{ minAccelerationStructureScratchOffsetAlignment_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceAccelerationStructurePropertiesKHR( PhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceAccelerationStructurePropertiesKHR( VkPhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceAccelerationStructurePropertiesKHR( *reinterpret_cast<PhysicalDeviceAccelerationStructurePropertiesKHR const *>( &rhs ) )
{}
PhysicalDeviceAccelerationStructurePropertiesKHR & operator=( PhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceAccelerationStructurePropertiesKHR & operator=( VkPhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructurePropertiesKHR const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceAccelerationStructurePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceAccelerationStructurePropertiesKHR*>( this );
}
operator VkPhysicalDeviceAccelerationStructurePropertiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceAccelerationStructurePropertiesKHR*>( this );
}
operator VkPhysicalDeviceAccelerationStructurePropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceAccelerationStructurePropertiesKHR*>( this );
}
operator VkPhysicalDeviceAccelerationStructurePropertiesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceAccelerationStructurePropertiesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint64_t const &, uint64_t const &, uint64_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, maxGeometryCount, maxInstanceCount, maxPrimitiveCount, maxPerStageDescriptorAccelerationStructures, maxPerStageDescriptorUpdateAfterBindAccelerationStructures, maxDescriptorSetAccelerationStructures, maxDescriptorSetUpdateAfterBindAccelerationStructures, minAccelerationStructureScratchOffsetAlignment );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceAccelerationStructurePropertiesKHR const & ) const = default;
#else
bool operator==( PhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( maxGeometryCount == rhs.maxGeometryCount )
&& ( maxInstanceCount == rhs.maxInstanceCount )
&& ( maxPrimitiveCount == rhs.maxPrimitiveCount )
&& ( maxPerStageDescriptorAccelerationStructures == rhs.maxPerStageDescriptorAccelerationStructures )
&& ( maxPerStageDescriptorUpdateAfterBindAccelerationStructures == rhs.maxPerStageDescriptorUpdateAfterBindAccelerationStructures )
&& ( maxDescriptorSetAccelerationStructures == rhs.maxDescriptorSetAccelerationStructures )
&& ( maxDescriptorSetUpdateAfterBindAccelerationStructures == rhs.maxDescriptorSetUpdateAfterBindAccelerationStructures )
&& ( minAccelerationStructureScratchOffsetAlignment == rhs.minAccelerationStructureScratchOffsetAlignment );
#endif
}
bool operator!=( PhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAccelerationStructurePropertiesKHR;
void * pNext = {};
uint64_t maxGeometryCount = {};
uint64_t maxInstanceCount = {};
uint64_t maxPrimitiveCount = {};
uint32_t maxPerStageDescriptorAccelerationStructures = {};
uint32_t maxPerStageDescriptorUpdateAfterBindAccelerationStructures = {};
uint32_t maxDescriptorSetAccelerationStructures = {};
uint32_t maxDescriptorSetUpdateAfterBindAccelerationStructures = {};
uint32_t minAccelerationStructureScratchOffsetAlignment = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceAccelerationStructurePropertiesKHR>
{
using Type = PhysicalDeviceAccelerationStructurePropertiesKHR;
};
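// Usage sketch (editorial addition, not generated): reading these limits via getProperties2
// and rounding a scratch-buffer address up to the required alignment (a power of two);
// `physicalDevice` is assumed valid and `scratchBase` is a hypothetical VkDeviceAddress.
//   auto chain = physicalDevice.getProperties2<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2,
//                                              VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructurePropertiesKHR>();
//   uint64_t align = chain.get<VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructurePropertiesKHR>()
//                      .minAccelerationStructureScratchOffsetAlignment;
//   uint64_t alignedScratch = ( scratchBase + align - 1 ) & ~( align - 1 );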
// wrapper struct for struct VkPhysicalDeviceAddressBindingReportFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceAddressBindingReportFeaturesEXT.html
struct PhysicalDeviceAddressBindingReportFeaturesEXT
{
using NativeType = VkPhysicalDeviceAddressBindingReportFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAddressBindingReportFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceAddressBindingReportFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 reportAddressBinding_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, reportAddressBinding{ reportAddressBinding_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceAddressBindingReportFeaturesEXT( PhysicalDeviceAddressBindingReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceAddressBindingReportFeaturesEXT( VkPhysicalDeviceAddressBindingReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceAddressBindingReportFeaturesEXT( *reinterpret_cast<PhysicalDeviceAddressBindingReportFeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceAddressBindingReportFeaturesEXT & operator=( PhysicalDeviceAddressBindingReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceAddressBindingReportFeaturesEXT & operator=( VkPhysicalDeviceAddressBindingReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceAddressBindingReportFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAddressBindingReportFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAddressBindingReportFeaturesEXT & setReportAddressBinding( VULKAN_HPP_NAMESPACE::Bool32 reportAddressBinding_ ) VULKAN_HPP_NOEXCEPT
{
reportAddressBinding = reportAddressBinding_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceAddressBindingReportFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceAddressBindingReportFeaturesEXT*>( this );
}
operator VkPhysicalDeviceAddressBindingReportFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceAddressBindingReportFeaturesEXT*>( this );
}
operator VkPhysicalDeviceAddressBindingReportFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceAddressBindingReportFeaturesEXT*>( this );
}
operator VkPhysicalDeviceAddressBindingReportFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceAddressBindingReportFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, reportAddressBinding );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceAddressBindingReportFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceAddressBindingReportFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( reportAddressBinding == rhs.reportAddressBinding );
#endif
}
bool operator!=( PhysicalDeviceAddressBindingReportFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAddressBindingReportFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 reportAddressBinding = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceAddressBindingReportFeaturesEXT>
{
using Type = PhysicalDeviceAddressBindingReportFeaturesEXT;
};
// wrapper struct for struct VkPhysicalDeviceAmigoProfilingFeaturesSEC, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceAmigoProfilingFeaturesSEC.html
struct PhysicalDeviceAmigoProfilingFeaturesSEC
{
using NativeType = VkPhysicalDeviceAmigoProfilingFeaturesSEC;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAmigoProfilingFeaturesSEC;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceAmigoProfilingFeaturesSEC(VULKAN_HPP_NAMESPACE::Bool32 amigoProfiling_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, amigoProfiling{ amigoProfiling_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceAmigoProfilingFeaturesSEC( PhysicalDeviceAmigoProfilingFeaturesSEC const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceAmigoProfilingFeaturesSEC( VkPhysicalDeviceAmigoProfilingFeaturesSEC const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceAmigoProfilingFeaturesSEC( *reinterpret_cast<PhysicalDeviceAmigoProfilingFeaturesSEC const *>( &rhs ) )
{}
PhysicalDeviceAmigoProfilingFeaturesSEC & operator=( PhysicalDeviceAmigoProfilingFeaturesSEC const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceAmigoProfilingFeaturesSEC & operator=( VkPhysicalDeviceAmigoProfilingFeaturesSEC const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceAmigoProfilingFeaturesSEC const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAmigoProfilingFeaturesSEC & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAmigoProfilingFeaturesSEC & setAmigoProfiling( VULKAN_HPP_NAMESPACE::Bool32 amigoProfiling_ ) VULKAN_HPP_NOEXCEPT
{
amigoProfiling = amigoProfiling_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceAmigoProfilingFeaturesSEC const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceAmigoProfilingFeaturesSEC*>( this );
}
operator VkPhysicalDeviceAmigoProfilingFeaturesSEC &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceAmigoProfilingFeaturesSEC*>( this );
}
operator VkPhysicalDeviceAmigoProfilingFeaturesSEC const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceAmigoProfilingFeaturesSEC*>( this );
}
operator VkPhysicalDeviceAmigoProfilingFeaturesSEC *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceAmigoProfilingFeaturesSEC*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, amigoProfiling );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceAmigoProfilingFeaturesSEC const & ) const = default;
#else
bool operator==( PhysicalDeviceAmigoProfilingFeaturesSEC const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( amigoProfiling == rhs.amigoProfiling );
#endif
}
bool operator!=( PhysicalDeviceAmigoProfilingFeaturesSEC const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAmigoProfilingFeaturesSEC;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 amigoProfiling = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceAmigoProfilingFeaturesSEC>
{
using Type = PhysicalDeviceAmigoProfilingFeaturesSEC;
};
// wrapper struct for struct VkPhysicalDeviceAntiLagFeaturesAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceAntiLagFeaturesAMD.html
struct PhysicalDeviceAntiLagFeaturesAMD
{
using NativeType = VkPhysicalDeviceAntiLagFeaturesAMD;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAntiLagFeaturesAMD;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceAntiLagFeaturesAMD(VULKAN_HPP_NAMESPACE::Bool32 antiLag_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, antiLag{ antiLag_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceAntiLagFeaturesAMD( PhysicalDeviceAntiLagFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceAntiLagFeaturesAMD( VkPhysicalDeviceAntiLagFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceAntiLagFeaturesAMD( *reinterpret_cast<PhysicalDeviceAntiLagFeaturesAMD const *>( &rhs ) )
{}
PhysicalDeviceAntiLagFeaturesAMD & operator=( PhysicalDeviceAntiLagFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceAntiLagFeaturesAMD & operator=( VkPhysicalDeviceAntiLagFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceAntiLagFeaturesAMD const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAntiLagFeaturesAMD & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAntiLagFeaturesAMD & setAntiLag( VULKAN_HPP_NAMESPACE::Bool32 antiLag_ ) VULKAN_HPP_NOEXCEPT
{
antiLag = antiLag_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceAntiLagFeaturesAMD const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceAntiLagFeaturesAMD*>( this );
}
operator VkPhysicalDeviceAntiLagFeaturesAMD &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceAntiLagFeaturesAMD*>( this );
}
operator VkPhysicalDeviceAntiLagFeaturesAMD const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceAntiLagFeaturesAMD*>( this );
}
operator VkPhysicalDeviceAntiLagFeaturesAMD *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceAntiLagFeaturesAMD*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, antiLag );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceAntiLagFeaturesAMD const & ) const = default;
#else
bool operator==( PhysicalDeviceAntiLagFeaturesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( antiLag == rhs.antiLag );
#endif
}
bool operator!=( PhysicalDeviceAntiLagFeaturesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAntiLagFeaturesAMD;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 antiLag = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceAntiLagFeaturesAMD>
{
using Type = PhysicalDeviceAntiLagFeaturesAMD;
};
// wrapper struct for struct VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT.html
struct PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT
{
using NativeType = VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 attachmentFeedbackLoopDynamicState_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, attachmentFeedbackLoopDynamicState{ attachmentFeedbackLoopDynamicState_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT( PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT( VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT( *reinterpret_cast<PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT & operator=( PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT & operator=( VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT & setAttachmentFeedbackLoopDynamicState( VULKAN_HPP_NAMESPACE::Bool32 attachmentFeedbackLoopDynamicState_ ) VULKAN_HPP_NOEXCEPT
{
attachmentFeedbackLoopDynamicState = attachmentFeedbackLoopDynamicState_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT*>( this );
}
operator VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT*>( this );
}
operator VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT*>( this );
}
operator VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, attachmentFeedbackLoopDynamicState );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( attachmentFeedbackLoopDynamicState == rhs.attachmentFeedbackLoopDynamicState );
#endif
}
bool operator!=( PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 attachmentFeedbackLoopDynamicState = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT>
{
using Type = PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT;
};
// wrapper struct for struct VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT.html
struct PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT
{
using NativeType = VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 attachmentFeedbackLoopLayout_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, attachmentFeedbackLoopLayout{ attachmentFeedbackLoopLayout_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT( PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT( VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT( *reinterpret_cast<PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT & operator=( PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT & operator=( VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT & setAttachmentFeedbackLoopLayout( VULKAN_HPP_NAMESPACE::Bool32 attachmentFeedbackLoopLayout_ ) VULKAN_HPP_NOEXCEPT
{
attachmentFeedbackLoopLayout = attachmentFeedbackLoopLayout_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT*>( this );
}
operator VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT*>( this );
}
operator VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT*>( this );
}
operator VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, attachmentFeedbackLoopLayout );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( attachmentFeedbackLoopLayout == rhs.attachmentFeedbackLoopLayout );
#endif
}
bool operator!=( PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 attachmentFeedbackLoopLayout = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT>
{
using Type = PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT;
};
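// Usage sketch (illustrative comment, not part of the generated API): the
// allowDuplicate = false constant above tells StructureChain that this struct may
// appear at most once in a pNext chain; a default-constructed chain wires the
// pNext pointers automatically:
//   VULKAN_HPP_NAMESPACE::StructureChain<
//       VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2,
//       VULKAN_HPP_NAMESPACE::PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT > chain;
//   auto & layoutFeatures = chain.get<
//       VULKAN_HPP_NAMESPACE::PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT >();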
// wrapper struct for struct VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT.html
struct PhysicalDeviceBlendOperationAdvancedFeaturesEXT
{
using NativeType = VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, advancedBlendCoherentOperations{ advancedBlendCoherentOperations_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedFeaturesEXT( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceBlendOperationAdvancedFeaturesEXT( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceBlendOperationAdvancedFeaturesEXT( *reinterpret_cast<PhysicalDeviceBlendOperationAdvancedFeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceBlendOperationAdvancedFeaturesEXT & operator=( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceBlendOperationAdvancedFeaturesEXT & operator=( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBlendOperationAdvancedFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBlendOperationAdvancedFeaturesEXT & setAdvancedBlendCoherentOperations( VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations_ ) VULKAN_HPP_NOEXCEPT
{
advancedBlendCoherentOperations = advancedBlendCoherentOperations_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT*>( this );
}
operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT*>( this );
}
operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT*>( this );
}
operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, advancedBlendCoherentOperations );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( advancedBlendCoherentOperations == rhs.advancedBlendCoherentOperations );
#endif
}
bool operator!=( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT>
{
using Type = PhysicalDeviceBlendOperationAdvancedFeaturesEXT;
};
// wrapper struct for struct VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT.html
struct PhysicalDeviceBlendOperationAdvancedPropertiesEXT
{
using NativeType = VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedPropertiesEXT(uint32_t advancedBlendMaxColorAttachments_ = {}, VULKAN_HPP_NAMESPACE::Bool32 advancedBlendIndependentBlend_ = {}, VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedSrcColor_ = {}, VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedDstColor_ = {}, VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCorrelatedOverlap_ = {}, VULKAN_HPP_NAMESPACE::Bool32 advancedBlendAllOperations_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, advancedBlendMaxColorAttachments{ advancedBlendMaxColorAttachments_ }, advancedBlendIndependentBlend{ advancedBlendIndependentBlend_ }, advancedBlendNonPremultipliedSrcColor{ advancedBlendNonPremultipliedSrcColor_ }, advancedBlendNonPremultipliedDstColor{ advancedBlendNonPremultipliedDstColor_ }, advancedBlendCorrelatedOverlap{ advancedBlendCorrelatedOverlap_ }, advancedBlendAllOperations{ advancedBlendAllOperations_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedPropertiesEXT( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceBlendOperationAdvancedPropertiesEXT( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceBlendOperationAdvancedPropertiesEXT( *reinterpret_cast<PhysicalDeviceBlendOperationAdvancedPropertiesEXT const *>( &rhs ) )
{}
PhysicalDeviceBlendOperationAdvancedPropertiesEXT & operator=( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceBlendOperationAdvancedPropertiesEXT & operator=( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT*>( this );
}
operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT*>( this );
}
operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT*>( this );
}
operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, advancedBlendMaxColorAttachments, advancedBlendIndependentBlend, advancedBlendNonPremultipliedSrcColor, advancedBlendNonPremultipliedDstColor, advancedBlendCorrelatedOverlap, advancedBlendAllOperations );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( advancedBlendMaxColorAttachments == rhs.advancedBlendMaxColorAttachments )
&& ( advancedBlendIndependentBlend == rhs.advancedBlendIndependentBlend )
&& ( advancedBlendNonPremultipliedSrcColor == rhs.advancedBlendNonPremultipliedSrcColor )
&& ( advancedBlendNonPremultipliedDstColor == rhs.advancedBlendNonPremultipliedDstColor )
&& ( advancedBlendCorrelatedOverlap == rhs.advancedBlendCorrelatedOverlap )
&& ( advancedBlendAllOperations == rhs.advancedBlendAllOperations );
#endif
}
bool operator!=( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT;
void * pNext = {};
uint32_t advancedBlendMaxColorAttachments = {};
VULKAN_HPP_NAMESPACE::Bool32 advancedBlendIndependentBlend = {};
VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedSrcColor = {};
VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedDstColor = {};
VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCorrelatedOverlap = {};
VULKAN_HPP_NAMESPACE::Bool32 advancedBlendAllOperations = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT>
{
using Type = PhysicalDeviceBlendOperationAdvancedPropertiesEXT;
};
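// Usage sketch (illustrative comment, not part of the generated API): properties
// structs are output-only, which is why the generator emits no setters for them.
// They are filled by chaining into PhysicalDeviceProperties2; `physicalDevice` is
// an assumed valid handle:
//   VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties2;
//   VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT blendProperties;
//   properties2.pNext = &blendProperties;
//   physicalDevice.getProperties2( &properties2 );
//   uint32_t maxAttachments = blendProperties.advancedBlendMaxColorAttachments;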
// wrapper struct for struct VkPhysicalDeviceBorderColorSwizzleFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceBorderColorSwizzleFeaturesEXT.html
struct PhysicalDeviceBorderColorSwizzleFeaturesEXT
{
using NativeType = VkPhysicalDeviceBorderColorSwizzleFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBorderColorSwizzleFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceBorderColorSwizzleFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzle_ = {}, VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzleFromImage_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, borderColorSwizzle{ borderColorSwizzle_ }, borderColorSwizzleFromImage{ borderColorSwizzleFromImage_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceBorderColorSwizzleFeaturesEXT( PhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceBorderColorSwizzleFeaturesEXT( VkPhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceBorderColorSwizzleFeaturesEXT( *reinterpret_cast<PhysicalDeviceBorderColorSwizzleFeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceBorderColorSwizzleFeaturesEXT & operator=( PhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceBorderColorSwizzleFeaturesEXT & operator=( VkPhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBorderColorSwizzleFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBorderColorSwizzleFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBorderColorSwizzleFeaturesEXT & setBorderColorSwizzle( VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzle_ ) VULKAN_HPP_NOEXCEPT
{
borderColorSwizzle = borderColorSwizzle_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBorderColorSwizzleFeaturesEXT & setBorderColorSwizzleFromImage( VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzleFromImage_ ) VULKAN_HPP_NOEXCEPT
{
borderColorSwizzleFromImage = borderColorSwizzleFromImage_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceBorderColorSwizzleFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceBorderColorSwizzleFeaturesEXT*>( this );
}
operator VkPhysicalDeviceBorderColorSwizzleFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceBorderColorSwizzleFeaturesEXT*>( this );
}
operator VkPhysicalDeviceBorderColorSwizzleFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceBorderColorSwizzleFeaturesEXT*>( this );
}
operator VkPhysicalDeviceBorderColorSwizzleFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceBorderColorSwizzleFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, borderColorSwizzle, borderColorSwizzleFromImage );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceBorderColorSwizzleFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( borderColorSwizzle == rhs.borderColorSwizzle )
&& ( borderColorSwizzleFromImage == rhs.borderColorSwizzleFromImage );
#endif
}
bool operator!=( PhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBorderColorSwizzleFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzle = {};
VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzleFromImage = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceBorderColorSwizzleFeaturesEXT>
{
using Type = PhysicalDeviceBorderColorSwizzleFeaturesEXT;
};
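// Note (illustrative comment): operator== compares every member, including the raw
// pNext pointer, so two structs requesting identical features but chained
// differently compare unequal; `otherStruct` is an assumed chained struct:
//   VULKAN_HPP_NAMESPACE::PhysicalDeviceBorderColorSwizzleFeaturesEXT a, b;
//   a.pNext = &otherStruct;
//   bool equal = ( a == b );  // false: pNext differs even though the feature bits match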
// wrapper struct for struct VkPhysicalDeviceBufferDeviceAddressFeatures, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceBufferDeviceAddressFeatures.html
struct PhysicalDeviceBufferDeviceAddressFeatures
{
using NativeType = VkPhysicalDeviceBufferDeviceAddressFeatures;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBufferDeviceAddressFeatures;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeatures(VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, bufferDeviceAddress{ bufferDeviceAddress_ }, bufferDeviceAddressCaptureReplay{ bufferDeviceAddressCaptureReplay_ }, bufferDeviceAddressMultiDevice{ bufferDeviceAddressMultiDevice_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeatures( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceBufferDeviceAddressFeatures( VkPhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceBufferDeviceAddressFeatures( *reinterpret_cast<PhysicalDeviceBufferDeviceAddressFeatures const *>( &rhs ) )
{}
PhysicalDeviceBufferDeviceAddressFeatures & operator=( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceBufferDeviceAddressFeatures & operator=( VkPhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeatures const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeatures & setBufferDeviceAddress( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT
{
bufferDeviceAddress = bufferDeviceAddress_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeatures & setBufferDeviceAddressCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
{
bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeatures & setBufferDeviceAddressMultiDevice( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT
{
bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceBufferDeviceAddressFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceBufferDeviceAddressFeatures*>( this );
}
operator VkPhysicalDeviceBufferDeviceAddressFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceBufferDeviceAddressFeatures*>( this );
}
operator VkPhysicalDeviceBufferDeviceAddressFeatures const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceBufferDeviceAddressFeatures*>( this );
}
operator VkPhysicalDeviceBufferDeviceAddressFeatures *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceBufferDeviceAddressFeatures*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, bufferDeviceAddress, bufferDeviceAddressCaptureReplay, bufferDeviceAddressMultiDevice );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceBufferDeviceAddressFeatures const & ) const = default;
#else
bool operator==( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( bufferDeviceAddress == rhs.bufferDeviceAddress )
&& ( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay )
&& ( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice );
#endif
}
bool operator!=( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBufferDeviceAddressFeatures;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress = {};
VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay = {};
VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceBufferDeviceAddressFeatures>
{
using Type = PhysicalDeviceBufferDeviceAddressFeatures;
};
using PhysicalDeviceBufferDeviceAddressFeaturesKHR = PhysicalDeviceBufferDeviceAddressFeatures;
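// Usage sketch (illustrative comment, not part of the generated API): enabling a
// feature at device creation by chaining the struct into DeviceCreateInfo::pNext.
// Queue setup and extension/version checks are omitted; all local names are
// assumptions:
//   VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeatures bdaFeatures;
//   bdaFeatures.setBufferDeviceAddress( VK_TRUE );
//   VULKAN_HPP_NAMESPACE::DeviceCreateInfo createInfo;
//   createInfo.setPNext( &bdaFeatures );
//   VULKAN_HPP_NAMESPACE::Device device = physicalDevice.createDevice( createInfo );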
// wrapper struct for struct VkPhysicalDeviceBufferDeviceAddressFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceBufferDeviceAddressFeaturesEXT.html
struct PhysicalDeviceBufferDeviceAddressFeaturesEXT
{
using NativeType = VkPhysicalDeviceBufferDeviceAddressFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, bufferDeviceAddress{ bufferDeviceAddress_ }, bufferDeviceAddressCaptureReplay{ bufferDeviceAddressCaptureReplay_ }, bufferDeviceAddressMultiDevice{ bufferDeviceAddressMultiDevice_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeaturesEXT( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceBufferDeviceAddressFeaturesEXT( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceBufferDeviceAddressFeaturesEXT( *reinterpret_cast<PhysicalDeviceBufferDeviceAddressFeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceBufferDeviceAddressFeaturesEXT & operator=( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceBufferDeviceAddressFeaturesEXT & operator=( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeaturesEXT & setBufferDeviceAddress( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT
{
bufferDeviceAddress = bufferDeviceAddress_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeaturesEXT & setBufferDeviceAddressCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
{
bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeaturesEXT & setBufferDeviceAddressMultiDevice( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT
{
bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceBufferDeviceAddressFeaturesEXT*>( this );
}
operator VkPhysicalDeviceBufferDeviceAddressFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceBufferDeviceAddressFeaturesEXT*>( this );
}
operator VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceBufferDeviceAddressFeaturesEXT*>( this );
}
operator VkPhysicalDeviceBufferDeviceAddressFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceBufferDeviceAddressFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, bufferDeviceAddress, bufferDeviceAddressCaptureReplay, bufferDeviceAddressMultiDevice );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( bufferDeviceAddress == rhs.bufferDeviceAddress )
&& ( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay )
&& ( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice );
#endif
}
bool operator!=( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress = {};
VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay = {};
VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT>
{
using Type = PhysicalDeviceBufferDeviceAddressFeaturesEXT;
};
using PhysicalDeviceBufferAddressFeaturesEXT = PhysicalDeviceBufferDeviceAddressFeaturesEXT;
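// Note (illustrative comment): the EXT struct above is layout-identical to the core
// PhysicalDeviceBufferDeviceAddressFeatures but carries a different sType, so the
// two are distinct chain members. The CppType metafunction recovers the wrapper
// type from a StructureType value:
//   using BdaFeaturesEXT = VULKAN_HPP_NAMESPACE::CppType<
//       VULKAN_HPP_NAMESPACE::StructureType,
//       VULKAN_HPP_NAMESPACE::StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT >::Type;
//   static_assert( std::is_same<BdaFeaturesEXT,
//       VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesEXT>::value, "" );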
// wrapper struct for struct VkPhysicalDeviceClusterAccelerationStructureFeaturesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceClusterAccelerationStructureFeaturesNV.html
struct PhysicalDeviceClusterAccelerationStructureFeaturesNV
{
using NativeType = VkPhysicalDeviceClusterAccelerationStructureFeaturesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceClusterAccelerationStructureFeaturesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceClusterAccelerationStructureFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 clusterAccelerationStructure_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, clusterAccelerationStructure{ clusterAccelerationStructure_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceClusterAccelerationStructureFeaturesNV( PhysicalDeviceClusterAccelerationStructureFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceClusterAccelerationStructureFeaturesNV( VkPhysicalDeviceClusterAccelerationStructureFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceClusterAccelerationStructureFeaturesNV( *reinterpret_cast<PhysicalDeviceClusterAccelerationStructureFeaturesNV const *>( &rhs ) )
{}
PhysicalDeviceClusterAccelerationStructureFeaturesNV & operator=( PhysicalDeviceClusterAccelerationStructureFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceClusterAccelerationStructureFeaturesNV & operator=( VkPhysicalDeviceClusterAccelerationStructureFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterAccelerationStructureFeaturesNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceClusterAccelerationStructureFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceClusterAccelerationStructureFeaturesNV & setClusterAccelerationStructure( VULKAN_HPP_NAMESPACE::Bool32 clusterAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT
{
clusterAccelerationStructure = clusterAccelerationStructure_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceClusterAccelerationStructureFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceClusterAccelerationStructureFeaturesNV*>( this );
}
operator VkPhysicalDeviceClusterAccelerationStructureFeaturesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceClusterAccelerationStructureFeaturesNV*>( this );
}
operator VkPhysicalDeviceClusterAccelerationStructureFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceClusterAccelerationStructureFeaturesNV*>( this );
}
operator VkPhysicalDeviceClusterAccelerationStructureFeaturesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceClusterAccelerationStructureFeaturesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, clusterAccelerationStructure );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceClusterAccelerationStructureFeaturesNV const & ) const = default;
#else
bool operator==( PhysicalDeviceClusterAccelerationStructureFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( clusterAccelerationStructure == rhs.clusterAccelerationStructure );
#endif
}
bool operator!=( PhysicalDeviceClusterAccelerationStructureFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceClusterAccelerationStructureFeaturesNV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 clusterAccelerationStructure = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceClusterAccelerationStructureFeaturesNV>
{
using Type = PhysicalDeviceClusterAccelerationStructureFeaturesNV;
};
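// Usage sketch (illustrative comment, not part of the generated API): the templated
// getFeatures2 overload returns a StructureChain, avoiding manual pNext wiring;
// `physicalDevice` is an assumed valid handle:
//   auto chain = physicalDevice.getFeatures2<
//       VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2,
//       VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterAccelerationStructureFeaturesNV >();
//   auto const & casFeatures = chain.get<
//       VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterAccelerationStructureFeaturesNV >();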
// wrapper struct for struct VkPhysicalDeviceClusterAccelerationStructurePropertiesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceClusterAccelerationStructurePropertiesNV.html
struct PhysicalDeviceClusterAccelerationStructurePropertiesNV
{
using NativeType = VkPhysicalDeviceClusterAccelerationStructurePropertiesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceClusterAccelerationStructurePropertiesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceClusterAccelerationStructurePropertiesNV(uint32_t maxVerticesPerCluster_ = {}, uint32_t maxTrianglesPerCluster_ = {}, uint32_t clusterScratchByteAlignment_ = {}, uint32_t clusterByteAlignment_ = {}, uint32_t clusterTemplateByteAlignment_ = {}, uint32_t clusterBottomLevelByteAlignment_ = {}, uint32_t clusterTemplateBoundsByteAlignment_ = {}, uint32_t maxClusterGeometryIndex_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, maxVerticesPerCluster{ maxVerticesPerCluster_ }, maxTrianglesPerCluster{ maxTrianglesPerCluster_ }, clusterScratchByteAlignment{ clusterScratchByteAlignment_ }, clusterByteAlignment{ clusterByteAlignment_ }, clusterTemplateByteAlignment{ clusterTemplateByteAlignment_ }, clusterBottomLevelByteAlignment{ clusterBottomLevelByteAlignment_ }, clusterTemplateBoundsByteAlignment{ clusterTemplateBoundsByteAlignment_ }, maxClusterGeometryIndex{ maxClusterGeometryIndex_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceClusterAccelerationStructurePropertiesNV( PhysicalDeviceClusterAccelerationStructurePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceClusterAccelerationStructurePropertiesNV( VkPhysicalDeviceClusterAccelerationStructurePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceClusterAccelerationStructurePropertiesNV( *reinterpret_cast<PhysicalDeviceClusterAccelerationStructurePropertiesNV const *>( &rhs ) )
{}
PhysicalDeviceClusterAccelerationStructurePropertiesNV & operator=( PhysicalDeviceClusterAccelerationStructurePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceClusterAccelerationStructurePropertiesNV & operator=( VkPhysicalDeviceClusterAccelerationStructurePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterAccelerationStructurePropertiesNV const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceClusterAccelerationStructurePropertiesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceClusterAccelerationStructurePropertiesNV*>( this );
}
operator VkPhysicalDeviceClusterAccelerationStructurePropertiesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceClusterAccelerationStructurePropertiesNV*>( this );
}
operator VkPhysicalDeviceClusterAccelerationStructurePropertiesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceClusterAccelerationStructurePropertiesNV*>( this );
}
operator VkPhysicalDeviceClusterAccelerationStructurePropertiesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceClusterAccelerationStructurePropertiesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, maxVerticesPerCluster, maxTrianglesPerCluster, clusterScratchByteAlignment, clusterByteAlignment, clusterTemplateByteAlignment, clusterBottomLevelByteAlignment, clusterTemplateBoundsByteAlignment, maxClusterGeometryIndex );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceClusterAccelerationStructurePropertiesNV const & ) const = default;
#else
bool operator==( PhysicalDeviceClusterAccelerationStructurePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( maxVerticesPerCluster == rhs.maxVerticesPerCluster )
&& ( maxTrianglesPerCluster == rhs.maxTrianglesPerCluster )
&& ( clusterScratchByteAlignment == rhs.clusterScratchByteAlignment )
&& ( clusterByteAlignment == rhs.clusterByteAlignment )
&& ( clusterTemplateByteAlignment == rhs.clusterTemplateByteAlignment )
&& ( clusterBottomLevelByteAlignment == rhs.clusterBottomLevelByteAlignment )
&& ( clusterTemplateBoundsByteAlignment == rhs.clusterTemplateBoundsByteAlignment )
&& ( maxClusterGeometryIndex == rhs.maxClusterGeometryIndex );
#endif
}
bool operator!=( PhysicalDeviceClusterAccelerationStructurePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceClusterAccelerationStructurePropertiesNV;
void * pNext = {};
uint32_t maxVerticesPerCluster = {};
uint32_t maxTrianglesPerCluster = {};
uint32_t clusterScratchByteAlignment = {};
uint32_t clusterByteAlignment = {};
uint32_t clusterTemplateByteAlignment = {};
uint32_t clusterBottomLevelByteAlignment = {};
uint32_t clusterTemplateBoundsByteAlignment = {};
uint32_t maxClusterGeometryIndex = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceClusterAccelerationStructurePropertiesNV>
{
using Type = PhysicalDeviceClusterAccelerationStructurePropertiesNV;
};
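// Usage sketch (illustrative comment, not part of the generated API): alignment
// limits like clusterScratchByteAlignment can be applied with the usual round-up
// bit trick, assuming the reported alignment is a power of two (the usual Vulkan
// guarantee); `props` and `scratchSize` are assumed inputs:
//   VULKAN_HPP_NAMESPACE::DeviceSize align = props.clusterScratchByteAlignment;
//   VULKAN_HPP_NAMESPACE::DeviceSize alignedSize = ( scratchSize + align - 1 ) & ~( align - 1 );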
// wrapper struct for struct VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI.html
struct PhysicalDeviceClusterCullingShaderFeaturesHUAWEI
{
using NativeType = VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceClusterCullingShaderFeaturesHUAWEI;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceClusterCullingShaderFeaturesHUAWEI(VULKAN_HPP_NAMESPACE::Bool32 clustercullingShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiviewClusterCullingShader_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, clustercullingShader{ clustercullingShader_ }, multiviewClusterCullingShader{ multiviewClusterCullingShader_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceClusterCullingShaderFeaturesHUAWEI( PhysicalDeviceClusterCullingShaderFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceClusterCullingShaderFeaturesHUAWEI( VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceClusterCullingShaderFeaturesHUAWEI( *reinterpret_cast<PhysicalDeviceClusterCullingShaderFeaturesHUAWEI const *>( &rhs ) )
{}
PhysicalDeviceClusterCullingShaderFeaturesHUAWEI & operator=( PhysicalDeviceClusterCullingShaderFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceClusterCullingShaderFeaturesHUAWEI & operator=( VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderFeaturesHUAWEI const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceClusterCullingShaderFeaturesHUAWEI & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceClusterCullingShaderFeaturesHUAWEI & setClustercullingShader( VULKAN_HPP_NAMESPACE::Bool32 clustercullingShader_ ) VULKAN_HPP_NOEXCEPT
{
clustercullingShader = clustercullingShader_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceClusterCullingShaderFeaturesHUAWEI & setMultiviewClusterCullingShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewClusterCullingShader_ ) VULKAN_HPP_NOEXCEPT
{
multiviewClusterCullingShader = multiviewClusterCullingShader_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI*>( this );
}
operator VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI*>( this );
}
operator VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI*>( this );
}
operator VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, clustercullingShader, multiviewClusterCullingShader );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceClusterCullingShaderFeaturesHUAWEI const & ) const = default;
#else
bool operator==( PhysicalDeviceClusterCullingShaderFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( clustercullingShader == rhs.clustercullingShader )
&& ( multiviewClusterCullingShader == rhs.multiviewClusterCullingShader );
#endif
}
bool operator!=( PhysicalDeviceClusterCullingShaderFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceClusterCullingShaderFeaturesHUAWEI;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 clustercullingShader = {};
VULKAN_HPP_NAMESPACE::Bool32 multiviewClusterCullingShader = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceClusterCullingShaderFeaturesHUAWEI>
{
using Type = PhysicalDeviceClusterCullingShaderFeaturesHUAWEI;
};
// wrapper struct for struct VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI.html
struct PhysicalDeviceClusterCullingShaderPropertiesHUAWEI
{
using NativeType = VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceClusterCullingShaderPropertiesHUAWEI;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceClusterCullingShaderPropertiesHUAWEI(std::array<uint32_t,3> const & maxWorkGroupCount_ = {}, std::array<uint32_t,3> const & maxWorkGroupSize_ = {}, uint32_t maxOutputClusterCount_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize indirectBufferOffsetAlignment_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, maxWorkGroupCount{ maxWorkGroupCount_ }, maxWorkGroupSize{ maxWorkGroupSize_ }, maxOutputClusterCount{ maxOutputClusterCount_ }, indirectBufferOffsetAlignment{ indirectBufferOffsetAlignment_ }
{}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceClusterCullingShaderPropertiesHUAWEI( PhysicalDeviceClusterCullingShaderPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceClusterCullingShaderPropertiesHUAWEI( VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceClusterCullingShaderPropertiesHUAWEI( *reinterpret_cast<PhysicalDeviceClusterCullingShaderPropertiesHUAWEI const *>( &rhs ) )
{}
PhysicalDeviceClusterCullingShaderPropertiesHUAWEI & operator=( PhysicalDeviceClusterCullingShaderPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceClusterCullingShaderPropertiesHUAWEI & operator=( VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderPropertiesHUAWEI const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI*>( this );
}
operator VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI*>( this );
}
operator VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI*>( this );
}
operator VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, maxWorkGroupCount, maxWorkGroupSize, maxOutputClusterCount, indirectBufferOffsetAlignment );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceClusterCullingShaderPropertiesHUAWEI const & ) const = default;
#else
bool operator==( PhysicalDeviceClusterCullingShaderPropertiesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( maxWorkGroupCount == rhs.maxWorkGroupCount )
&& ( maxWorkGroupSize == rhs.maxWorkGroupSize )
&& ( maxOutputClusterCount == rhs.maxOutputClusterCount )
&& ( indirectBufferOffsetAlignment == rhs.indirectBufferOffsetAlignment );
#endif
}
bool operator!=( PhysicalDeviceClusterCullingShaderPropertiesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceClusterCullingShaderPropertiesHUAWEI;
void * pNext = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> maxWorkGroupCount = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> maxWorkGroupSize = {};
uint32_t maxOutputClusterCount = {};
VULKAN_HPP_NAMESPACE::DeviceSize indirectBufferOffsetAlignment = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceClusterCullingShaderPropertiesHUAWEI>
{
using Type = PhysicalDeviceClusterCullingShaderPropertiesHUAWEI;
};
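// Usage sketch (illustrative comment, not part of the generated API): ArrayWrapper1D
// members such as maxWorkGroupCount behave like std::array, supporting indexing and
// range-for; `ccProps` is an assumed, already-filled properties struct:
//   uint32_t maxGroupsX = ccProps.maxWorkGroupCount[0];
//   for ( uint32_t dim : ccProps.maxWorkGroupSize ) { /* inspect each dimension */ }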
// wrapper struct for struct VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI.html
struct PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI
{
using NativeType = VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI(VULKAN_HPP_NAMESPACE::Bool32 clusterShadingRate_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, clusterShadingRate{ clusterShadingRate_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI( PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI( VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI( *reinterpret_cast<PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI const *>( &rhs ) )
{}
PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI & operator=( PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI & operator=( VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI & setClusterShadingRate( VULKAN_HPP_NAMESPACE::Bool32 clusterShadingRate_ ) VULKAN_HPP_NOEXCEPT
{
clusterShadingRate = clusterShadingRate_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI*>( this );
}
operator VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI*>( this );
}
operator VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI*>( this );
}
operator VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, clusterShadingRate );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI const & ) const = default;
#else
bool operator==( PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( clusterShadingRate == rhs.clusterShadingRate );
#endif
}
bool operator!=( PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 clusterShadingRate = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI>
{
using Type = PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI;
};
// wrapper struct for struct VkPhysicalDeviceCoherentMemoryFeaturesAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCoherentMemoryFeaturesAMD.html
struct PhysicalDeviceCoherentMemoryFeaturesAMD
{
using NativeType = VkPhysicalDeviceCoherentMemoryFeaturesAMD;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceCoherentMemoryFeaturesAMD(VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, deviceCoherentMemory{ deviceCoherentMemory_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceCoherentMemoryFeaturesAMD( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceCoherentMemoryFeaturesAMD( VkPhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceCoherentMemoryFeaturesAMD( *reinterpret_cast<PhysicalDeviceCoherentMemoryFeaturesAMD const *>( &rhs ) )
{}
PhysicalDeviceCoherentMemoryFeaturesAMD & operator=( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceCoherentMemoryFeaturesAMD & operator=( VkPhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCoherentMemoryFeaturesAMD & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCoherentMemoryFeaturesAMD & setDeviceCoherentMemory( VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory_ ) VULKAN_HPP_NOEXCEPT
{
deviceCoherentMemory = deviceCoherentMemory_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceCoherentMemoryFeaturesAMD const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceCoherentMemoryFeaturesAMD*>( this );
}
operator VkPhysicalDeviceCoherentMemoryFeaturesAMD &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceCoherentMemoryFeaturesAMD*>( this );
}
operator VkPhysicalDeviceCoherentMemoryFeaturesAMD const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceCoherentMemoryFeaturesAMD*>( this );
}
operator VkPhysicalDeviceCoherentMemoryFeaturesAMD *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceCoherentMemoryFeaturesAMD*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, deviceCoherentMemory );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceCoherentMemoryFeaturesAMD const & ) const = default;
#else
bool operator==( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( deviceCoherentMemory == rhs.deviceCoherentMemory );
#endif
}
bool operator!=( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD>
{
using Type = PhysicalDeviceCoherentMemoryFeaturesAMD;
};
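// Note (illustrative comment): the conversion operators make each wrapper
// interchangeable with its NativeType at C API boundaries, with no copy involved:
//   VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD features;
//   VkPhysicalDeviceCoherentMemoryFeaturesAMD const * pNative = features;  // implicit pointer conversion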
// wrapper struct for struct VkPhysicalDeviceColorWriteEnableFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceColorWriteEnableFeaturesEXT.html
struct PhysicalDeviceColorWriteEnableFeaturesEXT
{
using NativeType = VkPhysicalDeviceColorWriteEnableFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceColorWriteEnableFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceColorWriteEnableFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 colorWriteEnable_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, colorWriteEnable{ colorWriteEnable_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceColorWriteEnableFeaturesEXT( PhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceColorWriteEnableFeaturesEXT( VkPhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceColorWriteEnableFeaturesEXT( *reinterpret_cast<PhysicalDeviceColorWriteEnableFeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceColorWriteEnableFeaturesEXT & operator=( PhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceColorWriteEnableFeaturesEXT & operator=( VkPhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceColorWriteEnableFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceColorWriteEnableFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceColorWriteEnableFeaturesEXT & setColorWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 colorWriteEnable_ ) VULKAN_HPP_NOEXCEPT
{
colorWriteEnable = colorWriteEnable_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceColorWriteEnableFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceColorWriteEnableFeaturesEXT*>( this );
}
operator VkPhysicalDeviceColorWriteEnableFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceColorWriteEnableFeaturesEXT*>( this );
}
operator VkPhysicalDeviceColorWriteEnableFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceColorWriteEnableFeaturesEXT*>( this );
}
operator VkPhysicalDeviceColorWriteEnableFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceColorWriteEnableFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, colorWriteEnable );
}
#endif
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
auto operator<=>( PhysicalDeviceColorWriteEnableFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( colorWriteEnable == rhs.colorWriteEnable );
#endif
}
bool operator!=( PhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceColorWriteEnableFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 colorWriteEnable = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceColorWriteEnableFeaturesEXT>
{
using Type = PhysicalDeviceColorWriteEnableFeaturesEXT;
};
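// Usage sketch (illustrative, assumptions noted): to turn a supported feature on, the same
// struct goes onto vk::DeviceCreateInfo::pNext with the desired members set to VK_TRUE;
// the VK_EXT_color_write_enable extension must also appear in ppEnabledExtensionNames.
// The surrounding device-creation code is assumed.
//
//   vk::PhysicalDeviceColorWriteEnableFeaturesEXT colorWriteEnableFeatures;
//   colorWriteEnableFeatures.setColorWriteEnable( VK_TRUE );
//   vk::DeviceCreateInfo deviceCreateInfo;
//   deviceCreateInfo.setPNext( &colorWriteEnableFeatures );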
// wrapper struct for VkPhysicalDeviceCommandBufferInheritanceFeaturesNV (a usage sketch follows the definition), see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCommandBufferInheritanceFeaturesNV.html
struct PhysicalDeviceCommandBufferInheritanceFeaturesNV
{
using NativeType = VkPhysicalDeviceCommandBufferInheritanceFeaturesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCommandBufferInheritanceFeaturesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceCommandBufferInheritanceFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 commandBufferInheritance_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, commandBufferInheritance{ commandBufferInheritance_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceCommandBufferInheritanceFeaturesNV( PhysicalDeviceCommandBufferInheritanceFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceCommandBufferInheritanceFeaturesNV( VkPhysicalDeviceCommandBufferInheritanceFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceCommandBufferInheritanceFeaturesNV( *reinterpret_cast<PhysicalDeviceCommandBufferInheritanceFeaturesNV const *>( &rhs ) )
{}
PhysicalDeviceCommandBufferInheritanceFeaturesNV & operator=( PhysicalDeviceCommandBufferInheritanceFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceCommandBufferInheritanceFeaturesNV & operator=( VkPhysicalDeviceCommandBufferInheritanceFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCommandBufferInheritanceFeaturesNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCommandBufferInheritanceFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCommandBufferInheritanceFeaturesNV & setCommandBufferInheritance( VULKAN_HPP_NAMESPACE::Bool32 commandBufferInheritance_ ) VULKAN_HPP_NOEXCEPT
{
commandBufferInheritance = commandBufferInheritance_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceCommandBufferInheritanceFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceCommandBufferInheritanceFeaturesNV*>( this );
}
operator VkPhysicalDeviceCommandBufferInheritanceFeaturesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceCommandBufferInheritanceFeaturesNV*>( this );
}
operator VkPhysicalDeviceCommandBufferInheritanceFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceCommandBufferInheritanceFeaturesNV*>( this );
}
operator VkPhysicalDeviceCommandBufferInheritanceFeaturesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceCommandBufferInheritanceFeaturesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, commandBufferInheritance );
}
#endif
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
auto operator<=>( PhysicalDeviceCommandBufferInheritanceFeaturesNV const & ) const = default;
#else
bool operator==( PhysicalDeviceCommandBufferInheritanceFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( commandBufferInheritance == rhs.commandBufferInheritance );
#endif
}
bool operator!=( PhysicalDeviceCommandBufferInheritanceFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCommandBufferInheritanceFeaturesNV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 commandBufferInheritance = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceCommandBufferInheritanceFeaturesNV>
{
using Type = PhysicalDeviceCommandBufferInheritanceFeaturesNV;
};
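// Usage sketch (illustrative): instead of wiring pNext by hand, vk::StructureChain can
// build and unpack the whole chain in one call via the template overload of getFeatures2;
// `physicalDevice` is an assumed vk::PhysicalDevice.
//
//   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
//                                            vk::PhysicalDeviceCommandBufferInheritanceFeaturesNV>();
//   auto const & inheritanceFeatures = chain.get<vk::PhysicalDeviceCommandBufferInheritanceFeaturesNV>();
//   bool supported = ( inheritanceFeatures.commandBufferInheritance == VK_TRUE );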
// wrapper struct for VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR (a usage sketch follows the definition), see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR.html
struct PhysicalDeviceComputeShaderDerivativesFeaturesKHR
{
using NativeType = VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceComputeShaderDerivativesFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads_ = {}, VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, computeDerivativeGroupQuads{ computeDerivativeGroupQuads_ }, computeDerivativeGroupLinear{ computeDerivativeGroupLinear_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceComputeShaderDerivativesFeaturesKHR( PhysicalDeviceComputeShaderDerivativesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceComputeShaderDerivativesFeaturesKHR( VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceComputeShaderDerivativesFeaturesKHR( *reinterpret_cast<PhysicalDeviceComputeShaderDerivativesFeaturesKHR const *>( &rhs ) )
{}
PhysicalDeviceComputeShaderDerivativesFeaturesKHR & operator=( PhysicalDeviceComputeShaderDerivativesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceComputeShaderDerivativesFeaturesKHR & operator=( VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceComputeShaderDerivativesFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceComputeShaderDerivativesFeaturesKHR & setComputeDerivativeGroupQuads( VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads_ ) VULKAN_HPP_NOEXCEPT
{
computeDerivativeGroupQuads = computeDerivativeGroupQuads_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceComputeShaderDerivativesFeaturesKHR & setComputeDerivativeGroupLinear( VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear_ ) VULKAN_HPP_NOEXCEPT
{
computeDerivativeGroupLinear = computeDerivativeGroupLinear_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR*>( this );
}
operator VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR*>( this );
}
operator VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR*>( this );
}
operator VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, computeDerivativeGroupQuads, computeDerivativeGroupLinear );
}
#endif
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
auto operator<=>( PhysicalDeviceComputeShaderDerivativesFeaturesKHR const & ) const = default;
#else
bool operator==( PhysicalDeviceComputeShaderDerivativesFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( computeDerivativeGroupQuads == rhs.computeDerivativeGroupQuads )
&& ( computeDerivativeGroupLinear == rhs.computeDerivativeGroupLinear );
#endif
}
bool operator!=( PhysicalDeviceComputeShaderDerivativesFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads = {};
VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesKHR>
{
using Type = PhysicalDeviceComputeShaderDerivativesFeaturesKHR;
};
using PhysicalDeviceComputeShaderDerivativesFeaturesNV = PhysicalDeviceComputeShaderDerivativesFeaturesKHR;
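// Note: the alias above makes PhysicalDeviceComputeShaderDerivativesFeaturesNV and the KHR
// struct one and the same type, so code written against the older NV extension name keeps
// compiling. A brief sketch of the generated setters, which each return *this and so chain:
//
//   vk::PhysicalDeviceComputeShaderDerivativesFeaturesKHR derivativeFeatures;
//   derivativeFeatures.setComputeDerivativeGroupQuads( VK_TRUE )
//                     .setComputeDerivativeGroupLinear( VK_TRUE );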
// wrapper struct for VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR (a usage sketch follows the definition), see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR.html
struct PhysicalDeviceComputeShaderDerivativesPropertiesKHR
{
using NativeType = VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceComputeShaderDerivativesPropertiesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceComputeShaderDerivativesPropertiesKHR(VULKAN_HPP_NAMESPACE::Bool32 meshAndTaskShaderDerivatives_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, meshAndTaskShaderDerivatives{ meshAndTaskShaderDerivatives_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceComputeShaderDerivativesPropertiesKHR( PhysicalDeviceComputeShaderDerivativesPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceComputeShaderDerivativesPropertiesKHR( VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceComputeShaderDerivativesPropertiesKHR( *reinterpret_cast<PhysicalDeviceComputeShaderDerivativesPropertiesKHR const *>( &rhs ) )
{}
PhysicalDeviceComputeShaderDerivativesPropertiesKHR & operator=( PhysicalDeviceComputeShaderDerivativesPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceComputeShaderDerivativesPropertiesKHR & operator=( VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesPropertiesKHR const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR*>( this );
}
operator VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR*>( this );
}
operator VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR*>( this );
}
operator VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, meshAndTaskShaderDerivatives );
}
#endif
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
auto operator<=>( PhysicalDeviceComputeShaderDerivativesPropertiesKHR const & ) const = default;
#else
bool operator==( PhysicalDeviceComputeShaderDerivativesPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( meshAndTaskShaderDerivatives == rhs.meshAndTaskShaderDerivatives );
#endif
}
bool operator!=( PhysicalDeviceComputeShaderDerivativesPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceComputeShaderDerivativesPropertiesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 meshAndTaskShaderDerivatives = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceComputeShaderDerivativesPropertiesKHR>
{
using Type = PhysicalDeviceComputeShaderDerivativesPropertiesKHR;
};
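// Usage sketch (illustrative): properties structs like this one are pure query outputs,
// which is why no setters are generated for them; they are chained into
// vk::PhysicalDeviceProperties2 and filled by PhysicalDevice::getProperties2.
// `physicalDevice` is an assumed vk::PhysicalDevice.
//
//   vk::PhysicalDeviceComputeShaderDerivativesPropertiesKHR derivativeProperties;
//   vk::PhysicalDeviceProperties2 properties2;
//   properties2.pNext = &derivativeProperties;
//   physicalDevice.getProperties2( &properties2 );
//   bool meshTaskDerivatives = ( derivativeProperties.meshAndTaskShaderDerivatives == VK_TRUE );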
// wrapper struct for VkPhysicalDeviceConditionalRenderingFeaturesEXT (a usage sketch follows the definition), see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceConditionalRenderingFeaturesEXT.html
struct PhysicalDeviceConditionalRenderingFeaturesEXT
{
using NativeType = VkPhysicalDeviceConditionalRenderingFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceConditionalRenderingFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering_ = {}, VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, conditionalRendering{ conditionalRendering_ }, inheritedConditionalRendering{ inheritedConditionalRendering_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceConditionalRenderingFeaturesEXT( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceConditionalRenderingFeaturesEXT( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceConditionalRenderingFeaturesEXT( *reinterpret_cast<PhysicalDeviceConditionalRenderingFeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceConditionalRenderingFeaturesEXT & operator=( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceConditionalRenderingFeaturesEXT & operator=( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceConditionalRenderingFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceConditionalRenderingFeaturesEXT & setConditionalRendering( VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering_ ) VULKAN_HPP_NOEXCEPT
{
conditionalRendering = conditionalRendering_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceConditionalRenderingFeaturesEXT & setInheritedConditionalRendering( VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering_ ) VULKAN_HPP_NOEXCEPT
{
inheritedConditionalRendering = inheritedConditionalRendering_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceConditionalRenderingFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceConditionalRenderingFeaturesEXT*>( this );
}
operator VkPhysicalDeviceConditionalRenderingFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceConditionalRenderingFeaturesEXT*>( this );
}
operator VkPhysicalDeviceConditionalRenderingFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceConditionalRenderingFeaturesEXT*>( this );
}
operator VkPhysicalDeviceConditionalRenderingFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceConditionalRenderingFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, conditionalRendering, inheritedConditionalRendering );
}
#endif
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
auto operator<=>( PhysicalDeviceConditionalRenderingFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( conditionalRendering == rhs.conditionalRendering )
&& ( inheritedConditionalRendering == rhs.inheritedConditionalRendering );
#endif
}
bool operator!=( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering = {};
VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT>
{
using Type = PhysicalDeviceConditionalRenderingFeaturesEXT;
};
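// Usage sketch (illustrative): several feature structs can share one pNext chain; note that
// in these generated constructors pNext_ is the *last* parameter, after the feature members.
// Both extensions named below are assumed to be enabled on the device as well.
//
//   vk::PhysicalDeviceConditionalRenderingFeaturesEXT conditionalRenderingFeatures{ VK_TRUE, VK_TRUE };
//   vk::PhysicalDeviceColorWriteEnableFeaturesEXT colorWriteEnableFeatures{ VK_TRUE, &conditionalRenderingFeatures };
//   vk::DeviceCreateInfo deviceCreateInfo;
//   deviceCreateInfo.setPNext( &colorWriteEnableFeatures ); // chain: DeviceCreateInfo -> colorWriteEnable -> conditionalRendering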
// wrapper struct for VkPhysicalDeviceConservativeRasterizationPropertiesEXT (a usage sketch follows the definition), see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceConservativeRasterizationPropertiesEXT.html
struct PhysicalDeviceConservativeRasterizationPropertiesEXT
{
using NativeType = VkPhysicalDeviceConservativeRasterizationPropertiesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceConservativeRasterizationPropertiesEXT(float primitiveOverestimationSize_ = {}, float maxExtraPrimitiveOverestimationSize_ = {}, float extraPrimitiveOverestimationSizeGranularity_ = {}, VULKAN_HPP_NAMESPACE::Bool32 primitiveUnderestimation_ = {}, VULKAN_HPP_NAMESPACE::Bool32 conservativePointAndLineRasterization_ = {}, VULKAN_HPP_NAMESPACE::Bool32 degenerateTrianglesRasterized_ = {}, VULKAN_HPP_NAMESPACE::Bool32 degenerateLinesRasterized_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fullyCoveredFragmentShaderInputVariable_ = {}, VULKAN_HPP_NAMESPACE::Bool32 conservativeRasterizationPostDepthCoverage_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, primitiveOverestimationSize{ primitiveOverestimationSize_ }, maxExtraPrimitiveOverestimationSize{ maxExtraPrimitiveOverestimationSize_ }, extraPrimitiveOverestimationSizeGranularity{ extraPrimitiveOverestimationSizeGranularity_ }, primitiveUnderestimation{ primitiveUnderestimation_ }, conservativePointAndLineRasterization{ conservativePointAndLineRasterization_ }, degenerateTrianglesRasterized{ degenerateTrianglesRasterized_ }, degenerateLinesRasterized{ degenerateLinesRasterized_ }, fullyCoveredFragmentShaderInputVariable{ fullyCoveredFragmentShaderInputVariable_ }, conservativeRasterizationPostDepthCoverage{ conservativeRasterizationPostDepthCoverage_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceConservativeRasterizationPropertiesEXT( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceConservativeRasterizationPropertiesEXT( VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceConservativeRasterizationPropertiesEXT( *reinterpret_cast<PhysicalDeviceConservativeRasterizationPropertiesEXT const *>( &rhs ) )
{}
PhysicalDeviceConservativeRasterizationPropertiesEXT & operator=( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceConservativeRasterizationPropertiesEXT & operator=( VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceConservativeRasterizationPropertiesEXT*>( this );
}
operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceConservativeRasterizationPropertiesEXT*>( this );
}
operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceConservativeRasterizationPropertiesEXT*>( this );
}
operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceConservativeRasterizationPropertiesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, float const &, float const &, float const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, primitiveOverestimationSize, maxExtraPrimitiveOverestimationSize, extraPrimitiveOverestimationSizeGranularity, primitiveUnderestimation, conservativePointAndLineRasterization, degenerateTrianglesRasterized, degenerateLinesRasterized, fullyCoveredFragmentShaderInputVariable, conservativeRasterizationPostDepthCoverage );
}
#endif
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
auto operator<=>( PhysicalDeviceConservativeRasterizationPropertiesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( primitiveOverestimationSize == rhs.primitiveOverestimationSize )
&& ( maxExtraPrimitiveOverestimationSize == rhs.maxExtraPrimitiveOverestimationSize )
&& ( extraPrimitiveOverestimationSizeGranularity == rhs.extraPrimitiveOverestimationSizeGranularity )
&& ( primitiveUnderestimation == rhs.primitiveUnderestimation )
&& ( conservativePointAndLineRasterization == rhs.conservativePointAndLineRasterization )
&& ( degenerateTrianglesRasterized == rhs.degenerateTrianglesRasterized )
&& ( degenerateLinesRasterized == rhs.degenerateLinesRasterized )
&& ( fullyCoveredFragmentShaderInputVariable == rhs.fullyCoveredFragmentShaderInputVariable )
&& ( conservativeRasterizationPostDepthCoverage == rhs.conservativeRasterizationPostDepthCoverage );
#endif
}
bool operator!=( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT;
void * pNext = {};
float primitiveOverestimationSize = {};
float maxExtraPrimitiveOverestimationSize = {};
float extraPrimitiveOverestimationSizeGranularity = {};
VULKAN_HPP_NAMESPACE::Bool32 primitiveUnderestimation = {};
VULKAN_HPP_NAMESPACE::Bool32 conservativePointAndLineRasterization = {};
VULKAN_HPP_NAMESPACE::Bool32 degenerateTrianglesRasterized = {};
VULKAN_HPP_NAMESPACE::Bool32 degenerateLinesRasterized = {};
VULKAN_HPP_NAMESPACE::Bool32 fullyCoveredFragmentShaderInputVariable = {};
VULKAN_HPP_NAMESPACE::Bool32 conservativeRasterizationPostDepthCoverage = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT>
{
using Type = PhysicalDeviceConservativeRasterizationPropertiesEXT;
};
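// Usage sketch (illustrative): after a getProperties2 query (as sketched earlier), the float
// limits here are expressed in pixels and bound what may be requested in
// vk::PipelineRasterizationConservativeStateCreateInfoEXT. `conservativeProps` is an assumed,
// already filled PhysicalDeviceConservativeRasterizationPropertiesEXT.
//
//   float maxExtra = conservativeProps.maxExtraPrimitiveOverestimationSize;         // upper bound for extraPrimitiveOverestimationSize
//   float step     = conservativeProps.extraPrimitiveOverestimationSizeGranularity; // granularity the implementation supports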
// wrapper struct for VkPhysicalDeviceCooperativeMatrix2FeaturesNV (a usage sketch follows the definition), see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCooperativeMatrix2FeaturesNV.html
struct PhysicalDeviceCooperativeMatrix2FeaturesNV
{
using NativeType = VkPhysicalDeviceCooperativeMatrix2FeaturesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCooperativeMatrix2FeaturesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrix2FeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixWorkgroupScope_ = {}, VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixFlexibleDimensions_ = {}, VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixReductions_ = {}, VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixConversions_ = {}, VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixPerElementOperations_ = {}, VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixTensorAddressing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixBlockLoads_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, cooperativeMatrixWorkgroupScope{ cooperativeMatrixWorkgroupScope_ }, cooperativeMatrixFlexibleDimensions{ cooperativeMatrixFlexibleDimensions_ }, cooperativeMatrixReductions{ cooperativeMatrixReductions_ }, cooperativeMatrixConversions{ cooperativeMatrixConversions_ }, cooperativeMatrixPerElementOperations{ cooperativeMatrixPerElementOperations_ }, cooperativeMatrixTensorAddressing{ cooperativeMatrixTensorAddressing_ }, cooperativeMatrixBlockLoads{ cooperativeMatrixBlockLoads_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrix2FeaturesNV( PhysicalDeviceCooperativeMatrix2FeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceCooperativeMatrix2FeaturesNV( VkPhysicalDeviceCooperativeMatrix2FeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceCooperativeMatrix2FeaturesNV( *reinterpret_cast<PhysicalDeviceCooperativeMatrix2FeaturesNV const *>( &rhs ) )
{}
PhysicalDeviceCooperativeMatrix2FeaturesNV & operator=( PhysicalDeviceCooperativeMatrix2FeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceCooperativeMatrix2FeaturesNV & operator=( VkPhysicalDeviceCooperativeMatrix2FeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrix2FeaturesNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrix2FeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrix2FeaturesNV & setCooperativeMatrixWorkgroupScope( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixWorkgroupScope_ ) VULKAN_HPP_NOEXCEPT
{
cooperativeMatrixWorkgroupScope = cooperativeMatrixWorkgroupScope_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrix2FeaturesNV & setCooperativeMatrixFlexibleDimensions( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixFlexibleDimensions_ ) VULKAN_HPP_NOEXCEPT
{
cooperativeMatrixFlexibleDimensions = cooperativeMatrixFlexibleDimensions_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrix2FeaturesNV & setCooperativeMatrixReductions( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixReductions_ ) VULKAN_HPP_NOEXCEPT
{
cooperativeMatrixReductions = cooperativeMatrixReductions_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrix2FeaturesNV & setCooperativeMatrixConversions( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixConversions_ ) VULKAN_HPP_NOEXCEPT
{
cooperativeMatrixConversions = cooperativeMatrixConversions_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrix2FeaturesNV & setCooperativeMatrixPerElementOperations( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixPerElementOperations_ ) VULKAN_HPP_NOEXCEPT
{
cooperativeMatrixPerElementOperations = cooperativeMatrixPerElementOperations_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrix2FeaturesNV & setCooperativeMatrixTensorAddressing( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixTensorAddressing_ ) VULKAN_HPP_NOEXCEPT
{
cooperativeMatrixTensorAddressing = cooperativeMatrixTensorAddressing_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrix2FeaturesNV & setCooperativeMatrixBlockLoads( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixBlockLoads_ ) VULKAN_HPP_NOEXCEPT
{
cooperativeMatrixBlockLoads = cooperativeMatrixBlockLoads_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceCooperativeMatrix2FeaturesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceCooperativeMatrix2FeaturesNV*>( this );
}
operator VkPhysicalDeviceCooperativeMatrix2FeaturesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceCooperativeMatrix2FeaturesNV*>( this );
}
operator VkPhysicalDeviceCooperativeMatrix2FeaturesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceCooperativeMatrix2FeaturesNV*>( this );
}
operator VkPhysicalDeviceCooperativeMatrix2FeaturesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceCooperativeMatrix2FeaturesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, cooperativeMatrixWorkgroupScope, cooperativeMatrixFlexibleDimensions, cooperativeMatrixReductions, cooperativeMatrixConversions, cooperativeMatrixPerElementOperations, cooperativeMatrixTensorAddressing, cooperativeMatrixBlockLoads );
}
#endif
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
auto operator<=>( PhysicalDeviceCooperativeMatrix2FeaturesNV const & ) const = default;
#else
bool operator==( PhysicalDeviceCooperativeMatrix2FeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( cooperativeMatrixWorkgroupScope == rhs.cooperativeMatrixWorkgroupScope )
&& ( cooperativeMatrixFlexibleDimensions == rhs.cooperativeMatrixFlexibleDimensions )
&& ( cooperativeMatrixReductions == rhs.cooperativeMatrixReductions )
&& ( cooperativeMatrixConversions == rhs.cooperativeMatrixConversions )
&& ( cooperativeMatrixPerElementOperations == rhs.cooperativeMatrixPerElementOperations )
&& ( cooperativeMatrixTensorAddressing == rhs.cooperativeMatrixTensorAddressing )
&& ( cooperativeMatrixBlockLoads == rhs.cooperativeMatrixBlockLoads );
#endif
}
bool operator!=( PhysicalDeviceCooperativeMatrix2FeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrix2FeaturesNV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixWorkgroupScope = {};
VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixFlexibleDimensions = {};
VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixReductions = {};
VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixConversions = {};
VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixPerElementOperations = {};
VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixTensorAddressing = {};
VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixBlockLoads = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceCooperativeMatrix2FeaturesNV>
{
using Type = PhysicalDeviceCooperativeMatrix2FeaturesNV;
};
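// Usage sketch (illustrative): the conversion operators defined above make the wrapper and
// the C struct freely interchangeable, which helps when mixing vulkan.hpp code with plain
// C Vulkan calls; `coopMatrix2Features` is an assumed instance.
//
//   vk::PhysicalDeviceCooperativeMatrix2FeaturesNV coopMatrix2Features;
//   VkPhysicalDeviceCooperativeMatrix2FeaturesNV & cRef = coopMatrix2Features; // reference conversion, no copy
//   VkPhysicalDeviceCooperativeMatrix2FeaturesNV * cPtr = coopMatrix2Features; // pointer conversion, same object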
// wrapper struct for VkPhysicalDeviceCooperativeMatrix2PropertiesNV (a usage sketch follows the definition), see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCooperativeMatrix2PropertiesNV.html
struct PhysicalDeviceCooperativeMatrix2PropertiesNV
{
using NativeType = VkPhysicalDeviceCooperativeMatrix2PropertiesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCooperativeMatrix2PropertiesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrix2PropertiesNV(uint32_t cooperativeMatrixWorkgroupScopeMaxWorkgroupSize_ = {}, uint32_t cooperativeMatrixFlexibleDimensionsMaxDimension_ = {}, uint32_t cooperativeMatrixWorkgroupScopeReservedSharedMemory_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, cooperativeMatrixWorkgroupScopeMaxWorkgroupSize{ cooperativeMatrixWorkgroupScopeMaxWorkgroupSize_ }, cooperativeMatrixFlexibleDimensionsMaxDimension{ cooperativeMatrixFlexibleDimensionsMaxDimension_ }, cooperativeMatrixWorkgroupScopeReservedSharedMemory{ cooperativeMatrixWorkgroupScopeReservedSharedMemory_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrix2PropertiesNV( PhysicalDeviceCooperativeMatrix2PropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceCooperativeMatrix2PropertiesNV( VkPhysicalDeviceCooperativeMatrix2PropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceCooperativeMatrix2PropertiesNV( *reinterpret_cast<PhysicalDeviceCooperativeMatrix2PropertiesNV const *>( &rhs ) )
{}
PhysicalDeviceCooperativeMatrix2PropertiesNV & operator=( PhysicalDeviceCooperativeMatrix2PropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceCooperativeMatrix2PropertiesNV & operator=( VkPhysicalDeviceCooperativeMatrix2PropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrix2PropertiesNV const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceCooperativeMatrix2PropertiesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceCooperativeMatrix2PropertiesNV*>( this );
}
operator VkPhysicalDeviceCooperativeMatrix2PropertiesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceCooperativeMatrix2PropertiesNV*>( this );
}
operator VkPhysicalDeviceCooperativeMatrix2PropertiesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceCooperativeMatrix2PropertiesNV*>( this );
}
operator VkPhysicalDeviceCooperativeMatrix2PropertiesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceCooperativeMatrix2PropertiesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, cooperativeMatrixWorkgroupScopeMaxWorkgroupSize, cooperativeMatrixFlexibleDimensionsMaxDimension, cooperativeMatrixWorkgroupScopeReservedSharedMemory );
}
#endif
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
auto operator<=>( PhysicalDeviceCooperativeMatrix2PropertiesNV const & ) const = default;
#else
bool operator==( PhysicalDeviceCooperativeMatrix2PropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( cooperativeMatrixWorkgroupScopeMaxWorkgroupSize == rhs.cooperativeMatrixWorkgroupScopeMaxWorkgroupSize )
&& ( cooperativeMatrixFlexibleDimensionsMaxDimension == rhs.cooperativeMatrixFlexibleDimensionsMaxDimension )
&& ( cooperativeMatrixWorkgroupScopeReservedSharedMemory == rhs.cooperativeMatrixWorkgroupScopeReservedSharedMemory );
#endif
}
bool operator!=( PhysicalDeviceCooperativeMatrix2PropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrix2PropertiesNV;
void * pNext = {};
uint32_t cooperativeMatrixWorkgroupScopeMaxWorkgroupSize = {};
uint32_t cooperativeMatrixFlexibleDimensionsMaxDimension = {};
uint32_t cooperativeMatrixWorkgroupScopeReservedSharedMemory = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceCooperativeMatrix2PropertiesNV>
{
using Type = PhysicalDeviceCooperativeMatrix2PropertiesNV;
};
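// Usage sketch (illustrative): the StructureChain form works for properties too, via the
// template overload of getProperties2; `physicalDevice` is an assumed vk::PhysicalDevice.
//
//   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
//                                              vk::PhysicalDeviceCooperativeMatrix2PropertiesNV>();
//   uint32_t maxDim = chain.get<vk::PhysicalDeviceCooperativeMatrix2PropertiesNV>()
//                          .cooperativeMatrixFlexibleDimensionsMaxDimension;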
// wrapper struct for VkPhysicalDeviceCooperativeMatrixFeaturesKHR (a usage sketch follows the definition), see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCooperativeMatrixFeaturesKHR.html
struct PhysicalDeviceCooperativeMatrixFeaturesKHR
{
using NativeType = VkPhysicalDeviceCooperativeMatrixFeaturesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCooperativeMatrixFeaturesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix_ = {}, VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, cooperativeMatrix{ cooperativeMatrix_ }, cooperativeMatrixRobustBufferAccess{ cooperativeMatrixRobustBufferAccess_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesKHR( PhysicalDeviceCooperativeMatrixFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceCooperativeMatrixFeaturesKHR( VkPhysicalDeviceCooperativeMatrixFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceCooperativeMatrixFeaturesKHR( *reinterpret_cast<PhysicalDeviceCooperativeMatrixFeaturesKHR const *>( &rhs ) )
{}
PhysicalDeviceCooperativeMatrixFeaturesKHR & operator=( PhysicalDeviceCooperativeMatrixFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceCooperativeMatrixFeaturesKHR & operator=( VkPhysicalDeviceCooperativeMatrixFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesKHR & setCooperativeMatrix( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix_ ) VULKAN_HPP_NOEXCEPT
{
cooperativeMatrix = cooperativeMatrix_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesKHR & setCooperativeMatrixRobustBufferAccess( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess_ ) VULKAN_HPP_NOEXCEPT
{
cooperativeMatrixRobustBufferAccess = cooperativeMatrixRobustBufferAccess_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceCooperativeMatrixFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceCooperativeMatrixFeaturesKHR*>( this );
}
operator VkPhysicalDeviceCooperativeMatrixFeaturesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceCooperativeMatrixFeaturesKHR*>( this );
}
operator VkPhysicalDeviceCooperativeMatrixFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceCooperativeMatrixFeaturesKHR*>( this );
}
operator VkPhysicalDeviceCooperativeMatrixFeaturesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceCooperativeMatrixFeaturesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, cooperativeMatrix, cooperativeMatrixRobustBufferAccess );
}
#endif
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
auto operator<=>( PhysicalDeviceCooperativeMatrixFeaturesKHR const & ) const = default;
#else
bool operator==( PhysicalDeviceCooperativeMatrixFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( cooperativeMatrix == rhs.cooperativeMatrix )
&& ( cooperativeMatrixRobustBufferAccess == rhs.cooperativeMatrixRobustBufferAccess );
#endif
}
bool operator!=( PhysicalDeviceCooperativeMatrixFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixFeaturesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix = {};
VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceCooperativeMatrixFeaturesKHR>
{
using Type = PhysicalDeviceCooperativeMatrixFeaturesKHR;
};
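// Usage sketch (illustrative, assuming vulkan.hpp's enhanced mode for the array setter):
// enabling cooperative matrices at device creation takes both the feature struct on the
// pNext chain and the VK_KHR_cooperative_matrix extension in the enabled-extension list.
//
//   vk::PhysicalDeviceCooperativeMatrixFeaturesKHR coopMatrixFeatures;
//   coopMatrixFeatures.setCooperativeMatrix( VK_TRUE );
//   char const * extensions[] = { VK_KHR_COOPERATIVE_MATRIX_EXTENSION_NAME };
//   vk::DeviceCreateInfo deviceCreateInfo;
//   deviceCreateInfo.setPEnabledExtensionNames( extensions ).setPNext( &coopMatrixFeatures );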
// wrapper struct for VkPhysicalDeviceCooperativeMatrixFeaturesNV (a usage sketch follows the definition), see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCooperativeMatrixFeaturesNV.html
struct PhysicalDeviceCooperativeMatrixFeaturesNV
{
using NativeType = VkPhysicalDeviceCooperativeMatrixFeaturesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix_ = {}, VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, cooperativeMatrix{ cooperativeMatrix_ }, cooperativeMatrixRobustBufferAccess{ cooperativeMatrixRobustBufferAccess_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesNV( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceCooperativeMatrixFeaturesNV( VkPhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceCooperativeMatrixFeaturesNV( *reinterpret_cast<PhysicalDeviceCooperativeMatrixFeaturesNV const *>( &rhs ) )
{}
PhysicalDeviceCooperativeMatrixFeaturesNV & operator=( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceCooperativeMatrixFeaturesNV & operator=( VkPhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesNV & setCooperativeMatrix( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix_ ) VULKAN_HPP_NOEXCEPT
{
cooperativeMatrix = cooperativeMatrix_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesNV & setCooperativeMatrixRobustBufferAccess( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess_ ) VULKAN_HPP_NOEXCEPT
{
cooperativeMatrixRobustBufferAccess = cooperativeMatrixRobustBufferAccess_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceCooperativeMatrixFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceCooperativeMatrixFeaturesNV*>( this );
}
operator VkPhysicalDeviceCooperativeMatrixFeaturesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceCooperativeMatrixFeaturesNV*>( this );
}
operator VkPhysicalDeviceCooperativeMatrixFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceCooperativeMatrixFeaturesNV*>( this );
}
operator VkPhysicalDeviceCooperativeMatrixFeaturesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceCooperativeMatrixFeaturesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, cooperativeMatrix, cooperativeMatrixRobustBufferAccess );
}
#endif
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
auto operator<=>( PhysicalDeviceCooperativeMatrixFeaturesNV const & ) const = default;
#else
bool operator==( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( cooperativeMatrix == rhs.cooperativeMatrix )
&& ( cooperativeMatrixRobustBufferAccess == rhs.cooperativeMatrixRobustBufferAccess );
#endif
}
bool operator!=( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix = {};
VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV>
{
using Type = PhysicalDeviceCooperativeMatrixFeaturesNV;
};
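// Note (hedged): unlike the compute-shader-derivatives pair above, the NV and KHR
// cooperative-matrix feature structs are distinct types with distinct sType values, so the
// right one has to be picked from what the implementation offers. A sketch, assuming
// <algorithm> and <cstring> are included and `physicalDevice` already exists:
//
//   auto available = physicalDevice.enumerateDeviceExtensionProperties();
//   bool hasKhrCoopMatrix = std::any_of( available.begin(), available.end(),
//     []( vk::ExtensionProperties const & ep )
//     { return strcmp( ep.extensionName.data(), VK_KHR_COOPERATIVE_MATRIX_EXTENSION_NAME ) == 0; } );
//   // chain PhysicalDeviceCooperativeMatrixFeaturesKHR if hasKhrCoopMatrix, else this NV struct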
// wrapper struct for VkPhysicalDeviceCooperativeMatrixPropertiesKHR (a usage sketch follows the definition), see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCooperativeMatrixPropertiesKHR.html
struct PhysicalDeviceCooperativeMatrixPropertiesKHR
{
using NativeType = VkPhysicalDeviceCooperativeMatrixPropertiesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCooperativeMatrixPropertiesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixPropertiesKHR(VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, cooperativeMatrixSupportedStages{ cooperativeMatrixSupportedStages_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixPropertiesKHR( PhysicalDeviceCooperativeMatrixPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceCooperativeMatrixPropertiesKHR( VkPhysicalDeviceCooperativeMatrixPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceCooperativeMatrixPropertiesKHR( *reinterpret_cast<PhysicalDeviceCooperativeMatrixPropertiesKHR const *>( &rhs ) )
{}
PhysicalDeviceCooperativeMatrixPropertiesKHR & operator=( PhysicalDeviceCooperativeMatrixPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceCooperativeMatrixPropertiesKHR & operator=( VkPhysicalDeviceCooperativeMatrixPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesKHR const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceCooperativeMatrixPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceCooperativeMatrixPropertiesKHR*>( this );
}
operator VkPhysicalDeviceCooperativeMatrixPropertiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceCooperativeMatrixPropertiesKHR*>( this );
}
operator VkPhysicalDeviceCooperativeMatrixPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceCooperativeMatrixPropertiesKHR*>( this );
}
operator VkPhysicalDeviceCooperativeMatrixPropertiesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceCooperativeMatrixPropertiesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ShaderStageFlags const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, cooperativeMatrixSupportedStages );
}
#endif
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
auto operator<=>( PhysicalDeviceCooperativeMatrixPropertiesKHR const & ) const = default;
#else
bool operator==( PhysicalDeviceCooperativeMatrixPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( cooperativeMatrixSupportedStages == rhs.cooperativeMatrixSupportedStages );
#endif
}
bool operator!=( PhysicalDeviceCooperativeMatrixPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixPropertiesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceCooperativeMatrixPropertiesKHR>
{
using Type = PhysicalDeviceCooperativeMatrixPropertiesKHR;
};
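// Usage sketch (illustrative): cooperativeMatrixSupportedStages is a bitmask, so support for
// a particular stage is tested with the flag operators; `coopMatrixProps` is an assumed
// PhysicalDeviceCooperativeMatrixPropertiesKHR filled via a getProperties2 query.
//
//   if ( coopMatrixProps.cooperativeMatrixSupportedStages & vk::ShaderStageFlagBits::eCompute )
//   {
//     // cooperative matrices can be used in compute shaders
//   }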
// wrapper struct for VkPhysicalDeviceCooperativeMatrixPropertiesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCooperativeMatrixPropertiesNV.html
struct PhysicalDeviceCooperativeMatrixPropertiesNV
{
using NativeType = VkPhysicalDeviceCooperativeMatrixPropertiesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixPropertiesNV(VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, cooperativeMatrixSupportedStages{ cooperativeMatrixSupportedStages_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixPropertiesNV( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceCooperativeMatrixPropertiesNV( VkPhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceCooperativeMatrixPropertiesNV( *reinterpret_cast<PhysicalDeviceCooperativeMatrixPropertiesNV const *>( &rhs ) )
{}
PhysicalDeviceCooperativeMatrixPropertiesNV & operator=( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceCooperativeMatrixPropertiesNV & operator=( VkPhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesNV const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceCooperativeMatrixPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceCooperativeMatrixPropertiesNV*>( this );
}
operator VkPhysicalDeviceCooperativeMatrixPropertiesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceCooperativeMatrixPropertiesNV*>( this );
}
operator VkPhysicalDeviceCooperativeMatrixPropertiesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceCooperativeMatrixPropertiesNV*>( this );
}
operator VkPhysicalDeviceCooperativeMatrixPropertiesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceCooperativeMatrixPropertiesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ShaderStageFlags const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, cooperativeMatrixSupportedStages );
}
#endif
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
auto operator<=>( PhysicalDeviceCooperativeMatrixPropertiesNV const & ) const = default;
#else
bool operator==( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( cooperativeMatrixSupportedStages == rhs.cooperativeMatrixSupportedStages );
#endif
}
bool operator!=( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV>
{
using Type = PhysicalDeviceCooperativeMatrixPropertiesNV;
};
// wrapper struct for VkPhysicalDeviceCooperativeVectorFeaturesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCooperativeVectorFeaturesNV.html
struct PhysicalDeviceCooperativeVectorFeaturesNV
{
using NativeType = VkPhysicalDeviceCooperativeVectorFeaturesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCooperativeVectorFeaturesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeVectorFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 cooperativeVector_ = {}, VULKAN_HPP_NAMESPACE::Bool32 cooperativeVectorTraining_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, cooperativeVector{ cooperativeVector_ }, cooperativeVectorTraining{ cooperativeVectorTraining_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeVectorFeaturesNV( PhysicalDeviceCooperativeVectorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceCooperativeVectorFeaturesNV( VkPhysicalDeviceCooperativeVectorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceCooperativeVectorFeaturesNV( *reinterpret_cast<PhysicalDeviceCooperativeVectorFeaturesNV const *>( &rhs ) )
{}
PhysicalDeviceCooperativeVectorFeaturesNV & operator=( PhysicalDeviceCooperativeVectorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceCooperativeVectorFeaturesNV & operator=( VkPhysicalDeviceCooperativeVectorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeVectorFeaturesNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeVectorFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeVectorFeaturesNV & setCooperativeVector( VULKAN_HPP_NAMESPACE::Bool32 cooperativeVector_ ) VULKAN_HPP_NOEXCEPT
{
cooperativeVector = cooperativeVector_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeVectorFeaturesNV & setCooperativeVectorTraining( VULKAN_HPP_NAMESPACE::Bool32 cooperativeVectorTraining_ ) VULKAN_HPP_NOEXCEPT
{
cooperativeVectorTraining = cooperativeVectorTraining_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceCooperativeVectorFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceCooperativeVectorFeaturesNV*>( this );
}
operator VkPhysicalDeviceCooperativeVectorFeaturesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceCooperativeVectorFeaturesNV*>( this );
}
operator VkPhysicalDeviceCooperativeVectorFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceCooperativeVectorFeaturesNV*>( this );
}
operator VkPhysicalDeviceCooperativeVectorFeaturesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceCooperativeVectorFeaturesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, cooperativeVector, cooperativeVectorTraining );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceCooperativeVectorFeaturesNV const & ) const = default;
#else
bool operator==( PhysicalDeviceCooperativeVectorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( cooperativeVector == rhs.cooperativeVector )
&& ( cooperativeVectorTraining == rhs.cooperativeVectorTraining );
#endif
}
bool operator!=( PhysicalDeviceCooperativeVectorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeVectorFeaturesNV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 cooperativeVector = {};
VULKAN_HPP_NAMESPACE::Bool32 cooperativeVectorTraining = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceCooperativeVectorFeaturesNV>
{
using Type = PhysicalDeviceCooperativeVectorFeaturesNV;
};
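  // Illustrative usage sketch (hand-written, not generated code): feature structs are
  // usually queried first and, if supported, chained into DeviceCreateInfo::pNext to
  // enable the features at device creation. The fluent setters above return *this, so
  // they chain; all variable names here are assumptions.
  //
  //   VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeVectorFeaturesNV coopVecFeatures;
  //   coopVecFeatures.setCooperativeVector( VK_TRUE ).setCooperativeVectorTraining( VK_TRUE );
  //   VULKAN_HPP_NAMESPACE::DeviceCreateInfo deviceCreateInfo;
  //   deviceCreateInfo.setPNext( &coopVecFeatures );   // request the features for the new device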
// wrapper struct for struct VkPhysicalDeviceCooperativeVectorPropertiesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCooperativeVectorPropertiesNV.html
struct PhysicalDeviceCooperativeVectorPropertiesNV
{
using NativeType = VkPhysicalDeviceCooperativeVectorPropertiesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCooperativeVectorPropertiesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeVectorPropertiesNV(VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeVectorSupportedStages_ = {}, VULKAN_HPP_NAMESPACE::Bool32 cooperativeVectorTrainingFloat16Accumulation_ = {}, VULKAN_HPP_NAMESPACE::Bool32 cooperativeVectorTrainingFloat32Accumulation_ = {}, uint32_t maxCooperativeVectorComponents_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, cooperativeVectorSupportedStages{ cooperativeVectorSupportedStages_ }, cooperativeVectorTrainingFloat16Accumulation{ cooperativeVectorTrainingFloat16Accumulation_ }, cooperativeVectorTrainingFloat32Accumulation{ cooperativeVectorTrainingFloat32Accumulation_ }, maxCooperativeVectorComponents{ maxCooperativeVectorComponents_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeVectorPropertiesNV( PhysicalDeviceCooperativeVectorPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceCooperativeVectorPropertiesNV( VkPhysicalDeviceCooperativeVectorPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceCooperativeVectorPropertiesNV( *reinterpret_cast<PhysicalDeviceCooperativeVectorPropertiesNV const *>( &rhs ) )
{}
PhysicalDeviceCooperativeVectorPropertiesNV & operator=( PhysicalDeviceCooperativeVectorPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceCooperativeVectorPropertiesNV & operator=( VkPhysicalDeviceCooperativeVectorPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeVectorPropertiesNV const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceCooperativeVectorPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceCooperativeVectorPropertiesNV*>( this );
}
operator VkPhysicalDeviceCooperativeVectorPropertiesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceCooperativeVectorPropertiesNV*>( this );
}
operator VkPhysicalDeviceCooperativeVectorPropertiesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceCooperativeVectorPropertiesNV*>( this );
}
operator VkPhysicalDeviceCooperativeVectorPropertiesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceCooperativeVectorPropertiesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ShaderStageFlags const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, cooperativeVectorSupportedStages, cooperativeVectorTrainingFloat16Accumulation, cooperativeVectorTrainingFloat32Accumulation, maxCooperativeVectorComponents );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceCooperativeVectorPropertiesNV const & ) const = default;
#else
bool operator==( PhysicalDeviceCooperativeVectorPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( cooperativeVectorSupportedStages == rhs.cooperativeVectorSupportedStages )
&& ( cooperativeVectorTrainingFloat16Accumulation == rhs.cooperativeVectorTrainingFloat16Accumulation )
&& ( cooperativeVectorTrainingFloat32Accumulation == rhs.cooperativeVectorTrainingFloat32Accumulation )
&& ( maxCooperativeVectorComponents == rhs.maxCooperativeVectorComponents );
#endif
}
bool operator!=( PhysicalDeviceCooperativeVectorPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeVectorPropertiesNV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeVectorSupportedStages = {};
VULKAN_HPP_NAMESPACE::Bool32 cooperativeVectorTrainingFloat16Accumulation = {};
VULKAN_HPP_NAMESPACE::Bool32 cooperativeVectorTrainingFloat32Accumulation = {};
uint32_t maxCooperativeVectorComponents = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceCooperativeVectorPropertiesNV>
{
using Type = PhysicalDeviceCooperativeVectorPropertiesNV;
};
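  // Illustrative usage sketch (hand-written, not generated code): the conversion
  // operators above let a wrapper struct interoperate with the C API directly;
  // `vkPhysicalDevice` is an assumed VkPhysicalDevice handle, and
  // vkGetPhysicalDeviceProperties2 requires Vulkan 1.1 or the equivalent extension.
  //
  //   VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeVectorPropertiesNV coopVecProps;
  //   VkPhysicalDeviceProperties2 props2{};
  //   props2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
  //   props2.pNext = static_cast<VkPhysicalDeviceCooperativeVectorPropertiesNV *>( coopVecProps );
  //   vkGetPhysicalDeviceProperties2( vkPhysicalDevice, &props2 );
  //   uint32_t maxComponents = coopVecProps.maxCooperativeVectorComponents;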
// wrapper struct for struct VkPhysicalDeviceCopyMemoryIndirectFeaturesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCopyMemoryIndirectFeaturesNV.html
struct PhysicalDeviceCopyMemoryIndirectFeaturesNV
{
using NativeType = VkPhysicalDeviceCopyMemoryIndirectFeaturesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCopyMemoryIndirectFeaturesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceCopyMemoryIndirectFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 indirectCopy_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, indirectCopy{ indirectCopy_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceCopyMemoryIndirectFeaturesNV( PhysicalDeviceCopyMemoryIndirectFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceCopyMemoryIndirectFeaturesNV( VkPhysicalDeviceCopyMemoryIndirectFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceCopyMemoryIndirectFeaturesNV( *reinterpret_cast<PhysicalDeviceCopyMemoryIndirectFeaturesNV const *>( &rhs ) )
{}
PhysicalDeviceCopyMemoryIndirectFeaturesNV & operator=( PhysicalDeviceCopyMemoryIndirectFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceCopyMemoryIndirectFeaturesNV & operator=( VkPhysicalDeviceCopyMemoryIndirectFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCopyMemoryIndirectFeaturesNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCopyMemoryIndirectFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCopyMemoryIndirectFeaturesNV & setIndirectCopy( VULKAN_HPP_NAMESPACE::Bool32 indirectCopy_ ) VULKAN_HPP_NOEXCEPT
{
indirectCopy = indirectCopy_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceCopyMemoryIndirectFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceCopyMemoryIndirectFeaturesNV*>( this );
}
operator VkPhysicalDeviceCopyMemoryIndirectFeaturesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceCopyMemoryIndirectFeaturesNV*>( this );
}
operator VkPhysicalDeviceCopyMemoryIndirectFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceCopyMemoryIndirectFeaturesNV*>( this );
}
operator VkPhysicalDeviceCopyMemoryIndirectFeaturesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceCopyMemoryIndirectFeaturesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, indirectCopy );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceCopyMemoryIndirectFeaturesNV const & ) const = default;
#else
bool operator==( PhysicalDeviceCopyMemoryIndirectFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( indirectCopy == rhs.indirectCopy );
#endif
}
bool operator!=( PhysicalDeviceCopyMemoryIndirectFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCopyMemoryIndirectFeaturesNV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 indirectCopy = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceCopyMemoryIndirectFeaturesNV>
{
using Type = PhysicalDeviceCopyMemoryIndirectFeaturesNV;
};
// wrapper struct for struct VkPhysicalDeviceCopyMemoryIndirectPropertiesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCopyMemoryIndirectPropertiesNV.html
struct PhysicalDeviceCopyMemoryIndirectPropertiesNV
{
using NativeType = VkPhysicalDeviceCopyMemoryIndirectPropertiesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCopyMemoryIndirectPropertiesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceCopyMemoryIndirectPropertiesNV(VULKAN_HPP_NAMESPACE::QueueFlags supportedQueues_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, supportedQueues{ supportedQueues_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceCopyMemoryIndirectPropertiesNV( PhysicalDeviceCopyMemoryIndirectPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceCopyMemoryIndirectPropertiesNV( VkPhysicalDeviceCopyMemoryIndirectPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceCopyMemoryIndirectPropertiesNV( *reinterpret_cast<PhysicalDeviceCopyMemoryIndirectPropertiesNV const *>( &rhs ) )
{}
PhysicalDeviceCopyMemoryIndirectPropertiesNV & operator=( PhysicalDeviceCopyMemoryIndirectPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceCopyMemoryIndirectPropertiesNV & operator=( VkPhysicalDeviceCopyMemoryIndirectPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCopyMemoryIndirectPropertiesNV const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceCopyMemoryIndirectPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceCopyMemoryIndirectPropertiesNV*>( this );
}
operator VkPhysicalDeviceCopyMemoryIndirectPropertiesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceCopyMemoryIndirectPropertiesNV*>( this );
}
operator VkPhysicalDeviceCopyMemoryIndirectPropertiesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceCopyMemoryIndirectPropertiesNV*>( this );
}
operator VkPhysicalDeviceCopyMemoryIndirectPropertiesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceCopyMemoryIndirectPropertiesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::QueueFlags const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, supportedQueues );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceCopyMemoryIndirectPropertiesNV const & ) const = default;
#else
bool operator==( PhysicalDeviceCopyMemoryIndirectPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( supportedQueues == rhs.supportedQueues );
#endif
}
bool operator!=( PhysicalDeviceCopyMemoryIndirectPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCopyMemoryIndirectPropertiesNV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::QueueFlags supportedQueues = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceCopyMemoryIndirectPropertiesNV>
{
using Type = PhysicalDeviceCopyMemoryIndirectPropertiesNV;
};
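  // Illustrative usage sketch (hand-written, not generated code): supportedQueues is a
  // bitmask, so callers test individual bits after the query; `copyMemProps` is an
  // assumed, already-filled instance of the struct above.
  //
  //   if ( copyMemProps.supportedQueues & VULKAN_HPP_NAMESPACE::QueueFlagBits::eTransfer )
  //   {
  //     // indirect memory copies can be recorded on transfer-capable queues
  //   }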
// wrapper struct for struct VkPhysicalDeviceCornerSampledImageFeaturesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCornerSampledImageFeaturesNV.html
struct PhysicalDeviceCornerSampledImageFeaturesNV
{
using NativeType = VkPhysicalDeviceCornerSampledImageFeaturesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceCornerSampledImageFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, cornerSampledImage{ cornerSampledImage_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceCornerSampledImageFeaturesNV( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceCornerSampledImageFeaturesNV( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceCornerSampledImageFeaturesNV( *reinterpret_cast<PhysicalDeviceCornerSampledImageFeaturesNV const *>( &rhs ) )
{}
PhysicalDeviceCornerSampledImageFeaturesNV & operator=( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceCornerSampledImageFeaturesNV & operator=( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCornerSampledImageFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCornerSampledImageFeaturesNV & setCornerSampledImage( VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage_ ) VULKAN_HPP_NOEXCEPT
{
cornerSampledImage = cornerSampledImage_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceCornerSampledImageFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceCornerSampledImageFeaturesNV*>( this );
}
operator VkPhysicalDeviceCornerSampledImageFeaturesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceCornerSampledImageFeaturesNV*>( this );
}
operator VkPhysicalDeviceCornerSampledImageFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceCornerSampledImageFeaturesNV*>( this );
}
operator VkPhysicalDeviceCornerSampledImageFeaturesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceCornerSampledImageFeaturesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, cornerSampledImage );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceCornerSampledImageFeaturesNV const & ) const = default;
#else
bool operator==( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( cornerSampledImage == rhs.cornerSampledImage );
#endif
}
bool operator!=( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV>
{
using Type = PhysicalDeviceCornerSampledImageFeaturesNV;
};
// wrapper struct for struct VkPhysicalDeviceCoverageReductionModeFeaturesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCoverageReductionModeFeaturesNV.html
struct PhysicalDeviceCoverageReductionModeFeaturesNV
{
using NativeType = VkPhysicalDeviceCoverageReductionModeFeaturesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceCoverageReductionModeFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, coverageReductionMode{ coverageReductionMode_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceCoverageReductionModeFeaturesNV( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceCoverageReductionModeFeaturesNV( VkPhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceCoverageReductionModeFeaturesNV( *reinterpret_cast<PhysicalDeviceCoverageReductionModeFeaturesNV const *>( &rhs ) )
{}
PhysicalDeviceCoverageReductionModeFeaturesNV & operator=( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceCoverageReductionModeFeaturesNV & operator=( VkPhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCoverageReductionModeFeaturesNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCoverageReductionModeFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCoverageReductionModeFeaturesNV & setCoverageReductionMode( VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode_ ) VULKAN_HPP_NOEXCEPT
{
coverageReductionMode = coverageReductionMode_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceCoverageReductionModeFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceCoverageReductionModeFeaturesNV*>( this );
}
operator VkPhysicalDeviceCoverageReductionModeFeaturesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceCoverageReductionModeFeaturesNV*>( this );
}
operator VkPhysicalDeviceCoverageReductionModeFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceCoverageReductionModeFeaturesNV*>( this );
}
operator VkPhysicalDeviceCoverageReductionModeFeaturesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceCoverageReductionModeFeaturesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, coverageReductionMode );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceCoverageReductionModeFeaturesNV const & ) const = default;
#else
bool operator==( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( coverageReductionMode == rhs.coverageReductionMode );
#endif
}
bool operator!=( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV>
{
using Type = PhysicalDeviceCoverageReductionModeFeaturesNV;
};
// wrapper struct for struct VkPhysicalDeviceCubicClampFeaturesQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCubicClampFeaturesQCOM.html
struct PhysicalDeviceCubicClampFeaturesQCOM
{
using NativeType = VkPhysicalDeviceCubicClampFeaturesQCOM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCubicClampFeaturesQCOM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceCubicClampFeaturesQCOM(VULKAN_HPP_NAMESPACE::Bool32 cubicRangeClamp_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, cubicRangeClamp{ cubicRangeClamp_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceCubicClampFeaturesQCOM( PhysicalDeviceCubicClampFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceCubicClampFeaturesQCOM( VkPhysicalDeviceCubicClampFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceCubicClampFeaturesQCOM( *reinterpret_cast<PhysicalDeviceCubicClampFeaturesQCOM const *>( &rhs ) )
{}
PhysicalDeviceCubicClampFeaturesQCOM & operator=( PhysicalDeviceCubicClampFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceCubicClampFeaturesQCOM & operator=( VkPhysicalDeviceCubicClampFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCubicClampFeaturesQCOM const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCubicClampFeaturesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCubicClampFeaturesQCOM & setCubicRangeClamp( VULKAN_HPP_NAMESPACE::Bool32 cubicRangeClamp_ ) VULKAN_HPP_NOEXCEPT
{
cubicRangeClamp = cubicRangeClamp_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceCubicClampFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceCubicClampFeaturesQCOM*>( this );
}
operator VkPhysicalDeviceCubicClampFeaturesQCOM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceCubicClampFeaturesQCOM*>( this );
}
operator VkPhysicalDeviceCubicClampFeaturesQCOM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceCubicClampFeaturesQCOM*>( this );
}
operator VkPhysicalDeviceCubicClampFeaturesQCOM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceCubicClampFeaturesQCOM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, cubicRangeClamp );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceCubicClampFeaturesQCOM const & ) const = default;
#else
bool operator==( PhysicalDeviceCubicClampFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( cubicRangeClamp == rhs.cubicRangeClamp );
#endif
}
bool operator!=( PhysicalDeviceCubicClampFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCubicClampFeaturesQCOM;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 cubicRangeClamp = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceCubicClampFeaturesQCOM>
{
using Type = PhysicalDeviceCubicClampFeaturesQCOM;
};
// wrapper struct for struct VkPhysicalDeviceCubicWeightsFeaturesQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCubicWeightsFeaturesQCOM.html
struct PhysicalDeviceCubicWeightsFeaturesQCOM
{
using NativeType = VkPhysicalDeviceCubicWeightsFeaturesQCOM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCubicWeightsFeaturesQCOM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceCubicWeightsFeaturesQCOM(VULKAN_HPP_NAMESPACE::Bool32 selectableCubicWeights_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, selectableCubicWeights{ selectableCubicWeights_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceCubicWeightsFeaturesQCOM( PhysicalDeviceCubicWeightsFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceCubicWeightsFeaturesQCOM( VkPhysicalDeviceCubicWeightsFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceCubicWeightsFeaturesQCOM( *reinterpret_cast<PhysicalDeviceCubicWeightsFeaturesQCOM const *>( &rhs ) )
{}
PhysicalDeviceCubicWeightsFeaturesQCOM & operator=( PhysicalDeviceCubicWeightsFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceCubicWeightsFeaturesQCOM & operator=( VkPhysicalDeviceCubicWeightsFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCubicWeightsFeaturesQCOM const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCubicWeightsFeaturesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCubicWeightsFeaturesQCOM & setSelectableCubicWeights( VULKAN_HPP_NAMESPACE::Bool32 selectableCubicWeights_ ) VULKAN_HPP_NOEXCEPT
{
selectableCubicWeights = selectableCubicWeights_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceCubicWeightsFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceCubicWeightsFeaturesQCOM*>( this );
}
operator VkPhysicalDeviceCubicWeightsFeaturesQCOM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceCubicWeightsFeaturesQCOM*>( this );
}
operator VkPhysicalDeviceCubicWeightsFeaturesQCOM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceCubicWeightsFeaturesQCOM*>( this );
}
operator VkPhysicalDeviceCubicWeightsFeaturesQCOM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceCubicWeightsFeaturesQCOM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, selectableCubicWeights );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceCubicWeightsFeaturesQCOM const & ) const = default;
#else
bool operator==( PhysicalDeviceCubicWeightsFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( selectableCubicWeights == rhs.selectableCubicWeights );
#endif
}
bool operator!=( PhysicalDeviceCubicWeightsFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCubicWeightsFeaturesQCOM;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 selectableCubicWeights = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceCubicWeightsFeaturesQCOM>
{
using Type = PhysicalDeviceCubicWeightsFeaturesQCOM;
};
#if defined( VK_ENABLE_BETA_EXTENSIONS )
// wrapper struct for struct VkPhysicalDeviceCudaKernelLaunchFeaturesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCudaKernelLaunchFeaturesNV.html
struct PhysicalDeviceCudaKernelLaunchFeaturesNV
{
using NativeType = VkPhysicalDeviceCudaKernelLaunchFeaturesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCudaKernelLaunchFeaturesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceCudaKernelLaunchFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 cudaKernelLaunchFeatures_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, cudaKernelLaunchFeatures{ cudaKernelLaunchFeatures_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceCudaKernelLaunchFeaturesNV( PhysicalDeviceCudaKernelLaunchFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceCudaKernelLaunchFeaturesNV( VkPhysicalDeviceCudaKernelLaunchFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceCudaKernelLaunchFeaturesNV( *reinterpret_cast<PhysicalDeviceCudaKernelLaunchFeaturesNV const *>( &rhs ) )
{}
PhysicalDeviceCudaKernelLaunchFeaturesNV & operator=( PhysicalDeviceCudaKernelLaunchFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceCudaKernelLaunchFeaturesNV & operator=( VkPhysicalDeviceCudaKernelLaunchFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCudaKernelLaunchFeaturesNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCudaKernelLaunchFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCudaKernelLaunchFeaturesNV & setCudaKernelLaunchFeatures( VULKAN_HPP_NAMESPACE::Bool32 cudaKernelLaunchFeatures_ ) VULKAN_HPP_NOEXCEPT
{
cudaKernelLaunchFeatures = cudaKernelLaunchFeatures_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceCudaKernelLaunchFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceCudaKernelLaunchFeaturesNV*>( this );
}
operator VkPhysicalDeviceCudaKernelLaunchFeaturesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceCudaKernelLaunchFeaturesNV*>( this );
}
operator VkPhysicalDeviceCudaKernelLaunchFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceCudaKernelLaunchFeaturesNV*>( this );
}
operator VkPhysicalDeviceCudaKernelLaunchFeaturesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceCudaKernelLaunchFeaturesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, cudaKernelLaunchFeatures );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceCudaKernelLaunchFeaturesNV const & ) const = default;
#else
bool operator==( PhysicalDeviceCudaKernelLaunchFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( cudaKernelLaunchFeatures == rhs.cudaKernelLaunchFeatures );
#endif
}
bool operator!=( PhysicalDeviceCudaKernelLaunchFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCudaKernelLaunchFeaturesNV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 cudaKernelLaunchFeatures = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceCudaKernelLaunchFeaturesNV>
{
using Type = PhysicalDeviceCudaKernelLaunchFeaturesNV;
};
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
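  // Note: the provisional CUDA kernel launch structs in this region are only compiled
  // when VK_ENABLE_BETA_EXTENSIONS is defined before including the headers, e.g.:
  //
  //   #define VK_ENABLE_BETA_EXTENSIONS
  //   #include <vulkan/vulkan.hpp>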
#if defined( VK_ENABLE_BETA_EXTENSIONS )
// wrapper struct for struct VkPhysicalDeviceCudaKernelLaunchPropertiesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCudaKernelLaunchPropertiesNV.html
struct PhysicalDeviceCudaKernelLaunchPropertiesNV
{
using NativeType = VkPhysicalDeviceCudaKernelLaunchPropertiesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCudaKernelLaunchPropertiesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceCudaKernelLaunchPropertiesNV(uint32_t computeCapabilityMinor_ = {}, uint32_t computeCapabilityMajor_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, computeCapabilityMinor{ computeCapabilityMinor_ }, computeCapabilityMajor{ computeCapabilityMajor_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceCudaKernelLaunchPropertiesNV( PhysicalDeviceCudaKernelLaunchPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceCudaKernelLaunchPropertiesNV( VkPhysicalDeviceCudaKernelLaunchPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceCudaKernelLaunchPropertiesNV( *reinterpret_cast<PhysicalDeviceCudaKernelLaunchPropertiesNV const *>( &rhs ) )
{}
PhysicalDeviceCudaKernelLaunchPropertiesNV & operator=( PhysicalDeviceCudaKernelLaunchPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceCudaKernelLaunchPropertiesNV & operator=( VkPhysicalDeviceCudaKernelLaunchPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCudaKernelLaunchPropertiesNV const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceCudaKernelLaunchPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceCudaKernelLaunchPropertiesNV*>( this );
}
operator VkPhysicalDeviceCudaKernelLaunchPropertiesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceCudaKernelLaunchPropertiesNV*>( this );
}
operator VkPhysicalDeviceCudaKernelLaunchPropertiesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceCudaKernelLaunchPropertiesNV*>( this );
}
operator VkPhysicalDeviceCudaKernelLaunchPropertiesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceCudaKernelLaunchPropertiesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, computeCapabilityMinor, computeCapabilityMajor );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceCudaKernelLaunchPropertiesNV const & ) const = default;
#else
bool operator==( PhysicalDeviceCudaKernelLaunchPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( computeCapabilityMinor == rhs.computeCapabilityMinor )
&& ( computeCapabilityMajor == rhs.computeCapabilityMajor );
#endif
}
bool operator!=( PhysicalDeviceCudaKernelLaunchPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCudaKernelLaunchPropertiesNV;
void * pNext = {};
uint32_t computeCapabilityMinor = {};
uint32_t computeCapabilityMajor = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceCudaKernelLaunchPropertiesNV>
{
using Type = PhysicalDeviceCudaKernelLaunchPropertiesNV;
};
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
// wrapper struct for struct VkPhysicalDeviceCustomBorderColorFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCustomBorderColorFeaturesEXT.html
struct PhysicalDeviceCustomBorderColorFeaturesEXT
{
using NativeType = VkPhysicalDeviceCustomBorderColorFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 customBorderColors_ = {}, VULKAN_HPP_NAMESPACE::Bool32 customBorderColorWithoutFormat_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, customBorderColors{ customBorderColors_ }, customBorderColorWithoutFormat{ customBorderColorWithoutFormat_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorFeaturesEXT( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceCustomBorderColorFeaturesEXT( VkPhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceCustomBorderColorFeaturesEXT( *reinterpret_cast<PhysicalDeviceCustomBorderColorFeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceCustomBorderColorFeaturesEXT & operator=( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceCustomBorderColorFeaturesEXT & operator=( VkPhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCustomBorderColorFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCustomBorderColorFeaturesEXT & setCustomBorderColors( VULKAN_HPP_NAMESPACE::Bool32 customBorderColors_ ) VULKAN_HPP_NOEXCEPT
{
customBorderColors = customBorderColors_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCustomBorderColorFeaturesEXT & setCustomBorderColorWithoutFormat( VULKAN_HPP_NAMESPACE::Bool32 customBorderColorWithoutFormat_ ) VULKAN_HPP_NOEXCEPT
{
customBorderColorWithoutFormat = customBorderColorWithoutFormat_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceCustomBorderColorFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceCustomBorderColorFeaturesEXT*>( this );
}
operator VkPhysicalDeviceCustomBorderColorFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceCustomBorderColorFeaturesEXT*>( this );
}
operator VkPhysicalDeviceCustomBorderColorFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceCustomBorderColorFeaturesEXT*>( this );
}
operator VkPhysicalDeviceCustomBorderColorFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceCustomBorderColorFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, customBorderColors, customBorderColorWithoutFormat );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceCustomBorderColorFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( customBorderColors == rhs.customBorderColors )
&& ( customBorderColorWithoutFormat == rhs.customBorderColorWithoutFormat );
#endif
}
bool operator!=( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 customBorderColors = {};
VULKAN_HPP_NAMESPACE::Bool32 customBorderColorWithoutFormat = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT>
{
using Type = PhysicalDeviceCustomBorderColorFeaturesEXT;
};
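  // Illustrative usage sketch (hand-written, not generated code): the CppType
  // specializations above enable StructureChain-based queries, which fill several
  // chained structs in one call (enhanced mode only); `physicalDevice` is an
  // assumed VULKAN_HPP_NAMESPACE::PhysicalDevice.
  //
  //   auto chain = physicalDevice.getFeatures2<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2,
  //                                            VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorFeaturesEXT>();
  //   auto const & cbcFeatures = chain.get<VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorFeaturesEXT>();
  //   bool customBorderColorsSupported = ( cbcFeatures.customBorderColors == VK_TRUE );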
// wrapper struct for struct VkPhysicalDeviceCustomBorderColorPropertiesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCustomBorderColorPropertiesEXT.html
struct PhysicalDeviceCustomBorderColorPropertiesEXT
{
using NativeType = VkPhysicalDeviceCustomBorderColorPropertiesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorPropertiesEXT(uint32_t maxCustomBorderColorSamplers_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, maxCustomBorderColorSamplers{ maxCustomBorderColorSamplers_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorPropertiesEXT( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceCustomBorderColorPropertiesEXT( VkPhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceCustomBorderColorPropertiesEXT( *reinterpret_cast<PhysicalDeviceCustomBorderColorPropertiesEXT const *>( &rhs ) )
{}
PhysicalDeviceCustomBorderColorPropertiesEXT & operator=( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceCustomBorderColorPropertiesEXT & operator=( VkPhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorPropertiesEXT const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceCustomBorderColorPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceCustomBorderColorPropertiesEXT*>( this );
}
operator VkPhysicalDeviceCustomBorderColorPropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceCustomBorderColorPropertiesEXT*>( this );
}
operator VkPhysicalDeviceCustomBorderColorPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceCustomBorderColorPropertiesEXT*>( this );
}
operator VkPhysicalDeviceCustomBorderColorPropertiesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceCustomBorderColorPropertiesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, maxCustomBorderColorSamplers );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceCustomBorderColorPropertiesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( maxCustomBorderColorSamplers == rhs.maxCustomBorderColorSamplers );
#endif
}
bool operator!=( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT;
void * pNext = {};
uint32_t maxCustomBorderColorSamplers = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT>
{
using Type = PhysicalDeviceCustomBorderColorPropertiesEXT;
};
// wrapper struct for struct VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV.html
struct PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV
{
using NativeType = VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, dedicatedAllocationImageAliasing{ dedicatedAllocationImageAliasing_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( *reinterpret_cast<PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const *>( &rhs ) )
{}
PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & operator=( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & operator=( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & setDedicatedAllocationImageAliasing( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing_ ) VULKAN_HPP_NOEXCEPT
{
dedicatedAllocationImageAliasing = dedicatedAllocationImageAliasing_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV*>( this );
}
operator VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV*>( this );
}
operator VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV*>( this );
}
operator VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, dedicatedAllocationImageAliasing );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & ) const = default;
#else
bool operator==( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( dedicatedAllocationImageAliasing == rhs.dedicatedAllocationImageAliasing );
#endif
}
bool operator!=( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>
{
using Type = PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV;
};
// wrapper struct for struct VkPhysicalDeviceDepthBiasControlFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDepthBiasControlFeaturesEXT.html
struct PhysicalDeviceDepthBiasControlFeaturesEXT
{
using NativeType = VkPhysicalDeviceDepthBiasControlFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDepthBiasControlFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthBiasControlFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 depthBiasControl_ = {}, VULKAN_HPP_NAMESPACE::Bool32 leastRepresentableValueForceUnormRepresentation_ = {}, VULKAN_HPP_NAMESPACE::Bool32 floatRepresentation_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthBiasExact_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, depthBiasControl{ depthBiasControl_ }, leastRepresentableValueForceUnormRepresentation{ leastRepresentableValueForceUnormRepresentation_ }, floatRepresentation{ floatRepresentation_ }, depthBiasExact{ depthBiasExact_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthBiasControlFeaturesEXT( PhysicalDeviceDepthBiasControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceDepthBiasControlFeaturesEXT( VkPhysicalDeviceDepthBiasControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceDepthBiasControlFeaturesEXT( *reinterpret_cast<PhysicalDeviceDepthBiasControlFeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceDepthBiasControlFeaturesEXT & operator=( PhysicalDeviceDepthBiasControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceDepthBiasControlFeaturesEXT & operator=( VkPhysicalDeviceDepthBiasControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthBiasControlFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthBiasControlFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthBiasControlFeaturesEXT & setDepthBiasControl( VULKAN_HPP_NAMESPACE::Bool32 depthBiasControl_ ) VULKAN_HPP_NOEXCEPT
{
depthBiasControl = depthBiasControl_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthBiasControlFeaturesEXT & setLeastRepresentableValueForceUnormRepresentation( VULKAN_HPP_NAMESPACE::Bool32 leastRepresentableValueForceUnormRepresentation_ ) VULKAN_HPP_NOEXCEPT
{
leastRepresentableValueForceUnormRepresentation = leastRepresentableValueForceUnormRepresentation_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthBiasControlFeaturesEXT & setFloatRepresentation( VULKAN_HPP_NAMESPACE::Bool32 floatRepresentation_ ) VULKAN_HPP_NOEXCEPT
{
floatRepresentation = floatRepresentation_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthBiasControlFeaturesEXT & setDepthBiasExact( VULKAN_HPP_NAMESPACE::Bool32 depthBiasExact_ ) VULKAN_HPP_NOEXCEPT
{
depthBiasExact = depthBiasExact_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceDepthBiasControlFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceDepthBiasControlFeaturesEXT*>( this );
}
operator VkPhysicalDeviceDepthBiasControlFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceDepthBiasControlFeaturesEXT*>( this );
}
operator VkPhysicalDeviceDepthBiasControlFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceDepthBiasControlFeaturesEXT*>( this );
}
operator VkPhysicalDeviceDepthBiasControlFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceDepthBiasControlFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, depthBiasControl, leastRepresentableValueForceUnormRepresentation, floatRepresentation, depthBiasExact );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceDepthBiasControlFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceDepthBiasControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( depthBiasControl == rhs.depthBiasControl )
&& ( leastRepresentableValueForceUnormRepresentation == rhs.leastRepresentableValueForceUnormRepresentation )
&& ( floatRepresentation == rhs.floatRepresentation )
&& ( depthBiasExact == rhs.depthBiasExact );
#endif
}
bool operator!=( PhysicalDeviceDepthBiasControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthBiasControlFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 depthBiasControl = {};
VULKAN_HPP_NAMESPACE::Bool32 leastRepresentableValueForceUnormRepresentation = {};
VULKAN_HPP_NAMESPACE::Bool32 floatRepresentation = {};
VULKAN_HPP_NAMESPACE::Bool32 depthBiasExact = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceDepthBiasControlFeaturesEXT>
{
using Type = PhysicalDeviceDepthBiasControlFeaturesEXT;
};
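  // Usage sketch (illustrative only, not part of the generated header): querying the depth bias
  // control features. Assumes the default `vk` namespace alias and a caller-provided
  // `physicalDevice`.
  //
  //   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
  //                                            vk::PhysicalDeviceDepthBiasControlFeaturesEXT>();
  //   auto const & depthBiasFeatures = chain.get<vk::PhysicalDeviceDepthBiasControlFeaturesEXT>();
  //   if ( depthBiasFeatures.depthBiasControl && depthBiasFeatures.depthBiasExact )
  //   {
  //     // exact depth bias representations can be requested; enable the feature by chaining this
  //     // struct (with the desired members set) into vk::DeviceCreateInfo::pNext
  //   }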
// wrapper struct for struct VkPhysicalDeviceDepthClampControlFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDepthClampControlFeaturesEXT.html
struct PhysicalDeviceDepthClampControlFeaturesEXT
{
using NativeType = VkPhysicalDeviceDepthClampControlFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDepthClampControlFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClampControlFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 depthClampControl_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, depthClampControl{ depthClampControl_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClampControlFeaturesEXT( PhysicalDeviceDepthClampControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceDepthClampControlFeaturesEXT( VkPhysicalDeviceDepthClampControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceDepthClampControlFeaturesEXT( *reinterpret_cast<PhysicalDeviceDepthClampControlFeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceDepthClampControlFeaturesEXT & operator=( PhysicalDeviceDepthClampControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceDepthClampControlFeaturesEXT & operator=( VkPhysicalDeviceDepthClampControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClampControlFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClampControlFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClampControlFeaturesEXT & setDepthClampControl( VULKAN_HPP_NAMESPACE::Bool32 depthClampControl_ ) VULKAN_HPP_NOEXCEPT
{
depthClampControl = depthClampControl_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceDepthClampControlFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceDepthClampControlFeaturesEXT*>( this );
}
operator VkPhysicalDeviceDepthClampControlFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceDepthClampControlFeaturesEXT*>( this );
}
operator VkPhysicalDeviceDepthClampControlFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceDepthClampControlFeaturesEXT*>( this );
}
operator VkPhysicalDeviceDepthClampControlFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceDepthClampControlFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, depthClampControl );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceDepthClampControlFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceDepthClampControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( depthClampControl == rhs.depthClampControl );
#endif
}
bool operator!=( PhysicalDeviceDepthClampControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthClampControlFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 depthClampControl = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceDepthClampControlFeaturesEXT>
{
using Type = PhysicalDeviceDepthClampControlFeaturesEXT;
};
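  // Usage sketch (illustrative only): enabling depth clamp control at device creation by chaining
  // the struct above into vk::DeviceCreateInfo. `deviceCreateInfo` and `physicalDevice` are
  // assumed to be set up by the caller; the default `vk` namespace alias is assumed.
  //
  //   vk::StructureChain<vk::DeviceCreateInfo, vk::PhysicalDeviceDepthClampControlFeaturesEXT> createChain{
  //     deviceCreateInfo, vk::PhysicalDeviceDepthClampControlFeaturesEXT{}.setDepthClampControl( VK_TRUE ) };
  //   vk::Device device = physicalDevice.createDevice( createChain.get<vk::DeviceCreateInfo>() );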
// wrapper struct for struct VkPhysicalDeviceDepthClampZeroOneFeaturesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDepthClampZeroOneFeaturesKHR.html
struct PhysicalDeviceDepthClampZeroOneFeaturesKHR
{
using NativeType = VkPhysicalDeviceDepthClampZeroOneFeaturesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDepthClampZeroOneFeaturesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClampZeroOneFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 depthClampZeroOne_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, depthClampZeroOne{ depthClampZeroOne_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClampZeroOneFeaturesKHR( PhysicalDeviceDepthClampZeroOneFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceDepthClampZeroOneFeaturesKHR( VkPhysicalDeviceDepthClampZeroOneFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceDepthClampZeroOneFeaturesKHR( *reinterpret_cast<PhysicalDeviceDepthClampZeroOneFeaturesKHR const *>( &rhs ) )
{}
PhysicalDeviceDepthClampZeroOneFeaturesKHR & operator=( PhysicalDeviceDepthClampZeroOneFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceDepthClampZeroOneFeaturesKHR & operator=( VkPhysicalDeviceDepthClampZeroOneFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClampZeroOneFeaturesKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClampZeroOneFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClampZeroOneFeaturesKHR & setDepthClampZeroOne( VULKAN_HPP_NAMESPACE::Bool32 depthClampZeroOne_ ) VULKAN_HPP_NOEXCEPT
{
depthClampZeroOne = depthClampZeroOne_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceDepthClampZeroOneFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceDepthClampZeroOneFeaturesKHR*>( this );
}
operator VkPhysicalDeviceDepthClampZeroOneFeaturesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceDepthClampZeroOneFeaturesKHR*>( this );
}
operator VkPhysicalDeviceDepthClampZeroOneFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceDepthClampZeroOneFeaturesKHR*>( this );
}
operator VkPhysicalDeviceDepthClampZeroOneFeaturesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceDepthClampZeroOneFeaturesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, depthClampZeroOne );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceDepthClampZeroOneFeaturesKHR const & ) const = default;
#else
bool operator==( PhysicalDeviceDepthClampZeroOneFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( depthClampZeroOne == rhs.depthClampZeroOne );
#endif
}
bool operator!=( PhysicalDeviceDepthClampZeroOneFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthClampZeroOneFeaturesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 depthClampZeroOne = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceDepthClampZeroOneFeaturesKHR>
{
using Type = PhysicalDeviceDepthClampZeroOneFeaturesKHR;
};
using PhysicalDeviceDepthClampZeroOneFeaturesEXT = PhysicalDeviceDepthClampZeroOneFeaturesKHR;
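  // Usage sketch (illustrative only): querying depth clamp zero-one support. Code written against
  // the original EXT extension can keep using the PhysicalDeviceDepthClampZeroOneFeaturesEXT alias
  // above. Assumes the default `vk` namespace alias and a caller-provided `physicalDevice`.
  //
  //   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
  //                                            vk::PhysicalDeviceDepthClampZeroOneFeaturesKHR>();
  //   bool zeroOne = chain.get<vk::PhysicalDeviceDepthClampZeroOneFeaturesKHR>().depthClampZeroOne == VK_TRUE;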
// wrapper struct for struct VkPhysicalDeviceDepthClipControlFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDepthClipControlFeaturesEXT.html
struct PhysicalDeviceDepthClipControlFeaturesEXT
{
using NativeType = VkPhysicalDeviceDepthClipControlFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDepthClipControlFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipControlFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 depthClipControl_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, depthClipControl{ depthClipControl_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipControlFeaturesEXT( PhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceDepthClipControlFeaturesEXT( VkPhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceDepthClipControlFeaturesEXT( *reinterpret_cast<PhysicalDeviceDepthClipControlFeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceDepthClipControlFeaturesEXT & operator=( PhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceDepthClipControlFeaturesEXT & operator=( VkPhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipControlFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClipControlFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClipControlFeaturesEXT & setDepthClipControl( VULKAN_HPP_NAMESPACE::Bool32 depthClipControl_ ) VULKAN_HPP_NOEXCEPT
{
depthClipControl = depthClipControl_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceDepthClipControlFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceDepthClipControlFeaturesEXT*>( this );
}
operator VkPhysicalDeviceDepthClipControlFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceDepthClipControlFeaturesEXT*>( this );
}
operator VkPhysicalDeviceDepthClipControlFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceDepthClipControlFeaturesEXT*>( this );
}
operator VkPhysicalDeviceDepthClipControlFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceDepthClipControlFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, depthClipControl );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceDepthClipControlFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( depthClipControl == rhs.depthClipControl );
#endif
}
bool operator!=( PhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthClipControlFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 depthClipControl = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceDepthClipControlFeaturesEXT>
{
using Type = PhysicalDeviceDepthClipControlFeaturesEXT;
};
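  // Usage sketch (illustrative only): checking for negative-one-to-one viewport depth support.
  // Assumes the default `vk` alias and a caller-provided `physicalDevice`.
  //
  //   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
  //                                            vk::PhysicalDeviceDepthClipControlFeaturesEXT>();
  //   if ( chain.get<vk::PhysicalDeviceDepthClipControlFeaturesEXT>().depthClipControl )
  //   {
  //     // once the feature is enabled on the device, a vk::PipelineViewportDepthClipControlCreateInfoEXT
  //     // with negativeOneToOne = VK_TRUE can be chained into the pipeline viewport state
  //   }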
// wrapper struct for struct VkPhysicalDeviceDepthClipEnableFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDepthClipEnableFeaturesEXT.html
struct PhysicalDeviceDepthClipEnableFeaturesEXT
{
using NativeType = VkPhysicalDeviceDepthClipEnableFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipEnableFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, depthClipEnable{ depthClipEnable_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipEnableFeaturesEXT( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceDepthClipEnableFeaturesEXT( VkPhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceDepthClipEnableFeaturesEXT( *reinterpret_cast<PhysicalDeviceDepthClipEnableFeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceDepthClipEnableFeaturesEXT & operator=( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceDepthClipEnableFeaturesEXT & operator=( VkPhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipEnableFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClipEnableFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClipEnableFeaturesEXT & setDepthClipEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ ) VULKAN_HPP_NOEXCEPT
{
depthClipEnable = depthClipEnable_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceDepthClipEnableFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceDepthClipEnableFeaturesEXT*>( this );
}
operator VkPhysicalDeviceDepthClipEnableFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceDepthClipEnableFeaturesEXT*>( this );
}
operator VkPhysicalDeviceDepthClipEnableFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceDepthClipEnableFeaturesEXT*>( this );
}
operator VkPhysicalDeviceDepthClipEnableFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceDepthClipEnableFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, depthClipEnable );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceDepthClipEnableFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( depthClipEnable == rhs.depthClipEnable );
#endif
}
bool operator!=( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT>
{
using Type = PhysicalDeviceDepthClipEnableFeaturesEXT;
};
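  // Usage sketch (illustrative only): requesting the depthClipEnable feature at device creation.
  // `deviceCreateInfo` is a caller-provided vk::DeviceCreateInfo; the default `vk` namespace alias
  // is assumed.
  //
  //   auto depthClipEnableFeatures = vk::PhysicalDeviceDepthClipEnableFeaturesEXT{}.setDepthClipEnable( VK_TRUE );
  //   deviceCreateInfo.setPNext( &depthClipEnableFeatures );  // assumes no other structs are chained yet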
// wrapper struct for struct VkPhysicalDeviceDepthStencilResolveProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDepthStencilResolveProperties.html
struct PhysicalDeviceDepthStencilResolveProperties
{
using NativeType = VkPhysicalDeviceDepthStencilResolveProperties;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDepthStencilResolveProperties;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthStencilResolveProperties(VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes_ = {}, VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes_ = {}, VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone_ = {}, VULKAN_HPP_NAMESPACE::Bool32 independentResolve_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, supportedDepthResolveModes{ supportedDepthResolveModes_ }, supportedStencilResolveModes{ supportedStencilResolveModes_ }, independentResolveNone{ independentResolveNone_ }, independentResolve{ independentResolve_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthStencilResolveProperties( PhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceDepthStencilResolveProperties( VkPhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceDepthStencilResolveProperties( *reinterpret_cast<PhysicalDeviceDepthStencilResolveProperties const *>( &rhs ) )
{}
PhysicalDeviceDepthStencilResolveProperties & operator=( PhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceDepthStencilResolveProperties & operator=( VkPhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolveProperties const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceDepthStencilResolveProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceDepthStencilResolveProperties*>( this );
}
operator VkPhysicalDeviceDepthStencilResolveProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceDepthStencilResolveProperties*>( this );
}
operator VkPhysicalDeviceDepthStencilResolveProperties const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceDepthStencilResolveProperties*>( this );
}
operator VkPhysicalDeviceDepthStencilResolveProperties *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceDepthStencilResolveProperties*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ResolveModeFlags const &, VULKAN_HPP_NAMESPACE::ResolveModeFlags const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, supportedDepthResolveModes, supportedStencilResolveModes, independentResolveNone, independentResolve );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceDepthStencilResolveProperties const & ) const = default;
#else
bool operator==( PhysicalDeviceDepthStencilResolveProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( supportedDepthResolveModes == rhs.supportedDepthResolveModes )
&& ( supportedStencilResolveModes == rhs.supportedStencilResolveModes )
&& ( independentResolveNone == rhs.independentResolveNone )
&& ( independentResolve == rhs.independentResolve );
#endif
}
bool operator!=( PhysicalDeviceDepthStencilResolveProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthStencilResolveProperties;
void * pNext = {};
VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes = {};
VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes = {};
VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone = {};
VULKAN_HPP_NAMESPACE::Bool32 independentResolve = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceDepthStencilResolveProperties>
{
using Type = PhysicalDeviceDepthStencilResolveProperties;
};
using PhysicalDeviceDepthStencilResolvePropertiesKHR = PhysicalDeviceDepthStencilResolveProperties;
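  // Usage sketch (illustrative only): querying the supported depth/stencil resolve modes (core in
  // Vulkan 1.2; the KHR alias above covers code written against the original extension). Assumes
  // the default `vk` alias and a caller-provided `physicalDevice`.
  //
  //   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
  //                                              vk::PhysicalDeviceDepthStencilResolveProperties>();
  //   auto const & resolveProps = chain.get<vk::PhysicalDeviceDepthStencilResolveProperties>();
  //   if ( resolveProps.supportedDepthResolveModes & vk::ResolveModeFlagBits::eSampleZero )
  //   {
  //     // sample-zero resolve may be used for the depth aspect of a resolve attachment
  //   }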
// wrapper struct for struct VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT.html
struct PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT
{
using NativeType = VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorBufferDensityMapPropertiesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT(size_t combinedImageSamplerDensityMapDescriptorSize_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, combinedImageSamplerDensityMapDescriptorSize{ combinedImageSamplerDensityMapDescriptorSize_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT( PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT( VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT( *reinterpret_cast<PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const *>( &rhs ) )
{}
PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT & operator=( PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT & operator=( VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT*>( this );
}
operator VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT*>( this );
}
operator VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT*>( this );
}
operator VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, size_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, combinedImageSamplerDensityMapDescriptorSize );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( combinedImageSamplerDensityMapDescriptorSize == rhs.combinedImageSamplerDensityMapDescriptorSize );
#endif
}
bool operator!=( PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorBufferDensityMapPropertiesEXT;
void * pNext = {};
size_t combinedImageSamplerDensityMapDescriptorSize = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceDescriptorBufferDensityMapPropertiesEXT>
{
using Type = PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT;
};
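  // Usage sketch (illustrative only): reading the combined image sampler descriptor size that
  // applies when descriptor buffers are used together with fragment density maps. Assumes the
  // default `vk` alias and a caller-provided `physicalDevice`.
  //
  //   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
  //                                              vk::PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT>();
  //   size_t descriptorSize =
  //     chain.get<vk::PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT>().combinedImageSamplerDensityMapDescriptorSize;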
// wrapper struct for struct VkPhysicalDeviceDescriptorBufferFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDescriptorBufferFeaturesEXT.html
struct PhysicalDeviceDescriptorBufferFeaturesEXT
{
using NativeType = VkPhysicalDeviceDescriptorBufferFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorBufferFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorBufferFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 descriptorBuffer_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBufferCaptureReplay_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBufferImageLayoutIgnored_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBufferPushDescriptors_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, descriptorBuffer{ descriptorBuffer_ }, descriptorBufferCaptureReplay{ descriptorBufferCaptureReplay_ }, descriptorBufferImageLayoutIgnored{ descriptorBufferImageLayoutIgnored_ }, descriptorBufferPushDescriptors{ descriptorBufferPushDescriptors_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorBufferFeaturesEXT( PhysicalDeviceDescriptorBufferFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceDescriptorBufferFeaturesEXT( VkPhysicalDeviceDescriptorBufferFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceDescriptorBufferFeaturesEXT( *reinterpret_cast<PhysicalDeviceDescriptorBufferFeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceDescriptorBufferFeaturesEXT & operator=( PhysicalDeviceDescriptorBufferFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceDescriptorBufferFeaturesEXT & operator=( VkPhysicalDeviceDescriptorBufferFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferFeaturesEXT & setDescriptorBuffer( VULKAN_HPP_NAMESPACE::Bool32 descriptorBuffer_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBuffer = descriptorBuffer_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferFeaturesEXT & setDescriptorBufferCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 descriptorBufferCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBufferCaptureReplay = descriptorBufferCaptureReplay_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferFeaturesEXT & setDescriptorBufferImageLayoutIgnored( VULKAN_HPP_NAMESPACE::Bool32 descriptorBufferImageLayoutIgnored_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBufferImageLayoutIgnored = descriptorBufferImageLayoutIgnored_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferFeaturesEXT & setDescriptorBufferPushDescriptors( VULKAN_HPP_NAMESPACE::Bool32 descriptorBufferPushDescriptors_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBufferPushDescriptors = descriptorBufferPushDescriptors_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceDescriptorBufferFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceDescriptorBufferFeaturesEXT*>( this );
}
operator VkPhysicalDeviceDescriptorBufferFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceDescriptorBufferFeaturesEXT*>( this );
}
operator VkPhysicalDeviceDescriptorBufferFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceDescriptorBufferFeaturesEXT*>( this );
}
operator VkPhysicalDeviceDescriptorBufferFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceDescriptorBufferFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, descriptorBuffer, descriptorBufferCaptureReplay, descriptorBufferImageLayoutIgnored, descriptorBufferPushDescriptors );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceDescriptorBufferFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceDescriptorBufferFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( descriptorBuffer == rhs.descriptorBuffer )
&& ( descriptorBufferCaptureReplay == rhs.descriptorBufferCaptureReplay )
&& ( descriptorBufferImageLayoutIgnored == rhs.descriptorBufferImageLayoutIgnored )
&& ( descriptorBufferPushDescriptors == rhs.descriptorBufferPushDescriptors );
#endif
}
bool operator!=( PhysicalDeviceDescriptorBufferFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorBufferFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 descriptorBuffer = {};
VULKAN_HPP_NAMESPACE::Bool32 descriptorBufferCaptureReplay = {};
VULKAN_HPP_NAMESPACE::Bool32 descriptorBufferImageLayoutIgnored = {};
VULKAN_HPP_NAMESPACE::Bool32 descriptorBufferPushDescriptors = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceDescriptorBufferFeaturesEXT>
{
using Type = PhysicalDeviceDescriptorBufferFeaturesEXT;
};
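  // Usage sketch (illustrative only): checking descriptor buffer support before enabling the
  // feature. Assumes the default `vk` alias and a caller-provided `physicalDevice`.
  //
  //   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
  //                                            vk::PhysicalDeviceDescriptorBufferFeaturesEXT>();
  //   auto const & dbFeatures = chain.get<vk::PhysicalDeviceDescriptorBufferFeaturesEXT>();
  //   if ( dbFeatures.descriptorBuffer )
  //   {
  //     // chain a copy of this struct (with the desired members set) into vk::DeviceCreateInfo::pNext
  //     // and enable the corresponding device extension
  //   }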
// wrapper struct for struct VkPhysicalDeviceDescriptorBufferPropertiesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDescriptorBufferPropertiesEXT.html
struct PhysicalDeviceDescriptorBufferPropertiesEXT
{
using NativeType = VkPhysicalDeviceDescriptorBufferPropertiesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorBufferPropertiesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorBufferPropertiesEXT(VULKAN_HPP_NAMESPACE::Bool32 combinedImageSamplerDescriptorSingleArray_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferlessPushDescriptors_ = {}, VULKAN_HPP_NAMESPACE::Bool32 allowSamplerImageViewPostSubmitCreation_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize descriptorBufferOffsetAlignment_ = {}, uint32_t maxDescriptorBufferBindings_ = {}, uint32_t maxResourceDescriptorBufferBindings_ = {}, uint32_t maxSamplerDescriptorBufferBindings_ = {}, uint32_t maxEmbeddedImmutableSamplerBindings_ = {}, uint32_t maxEmbeddedImmutableSamplers_ = {}, size_t bufferCaptureReplayDescriptorDataSize_ = {}, size_t imageCaptureReplayDescriptorDataSize_ = {}, size_t imageViewCaptureReplayDescriptorDataSize_ = {}, size_t samplerCaptureReplayDescriptorDataSize_ = {}, size_t accelerationStructureCaptureReplayDescriptorDataSize_ = {}, size_t samplerDescriptorSize_ = {}, size_t combinedImageSamplerDescriptorSize_ = {}, size_t sampledImageDescriptorSize_ = {}, size_t storageImageDescriptorSize_ = {}, size_t uniformTexelBufferDescriptorSize_ = {}, size_t robustUniformTexelBufferDescriptorSize_ = {}, size_t storageTexelBufferDescriptorSize_ = {}, size_t robustStorageTexelBufferDescriptorSize_ = {}, size_t uniformBufferDescriptorSize_ = {}, size_t robustUniformBufferDescriptorSize_ = {}, size_t storageBufferDescriptorSize_ = {}, size_t robustStorageBufferDescriptorSize_ = {}, size_t inputAttachmentDescriptorSize_ = {}, size_t accelerationStructureDescriptorSize_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize maxSamplerDescriptorBufferRange_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize maxResourceDescriptorBufferRange_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize samplerDescriptorBufferAddressSpaceSize_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize resourceDescriptorBufferAddressSpaceSize_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize descriptorBufferAddressSpaceSize_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, combinedImageSamplerDescriptorSingleArray{ combinedImageSamplerDescriptorSingleArray_ }, bufferlessPushDescriptors{ bufferlessPushDescriptors_ }, allowSamplerImageViewPostSubmitCreation{ allowSamplerImageViewPostSubmitCreation_ }, descriptorBufferOffsetAlignment{ descriptorBufferOffsetAlignment_ }, maxDescriptorBufferBindings{ maxDescriptorBufferBindings_ }, maxResourceDescriptorBufferBindings{ maxResourceDescriptorBufferBindings_ }, maxSamplerDescriptorBufferBindings{ maxSamplerDescriptorBufferBindings_ }, maxEmbeddedImmutableSamplerBindings{ maxEmbeddedImmutableSamplerBindings_ }, maxEmbeddedImmutableSamplers{ maxEmbeddedImmutableSamplers_ }, bufferCaptureReplayDescriptorDataSize{ bufferCaptureReplayDescriptorDataSize_ }, imageCaptureReplayDescriptorDataSize{ imageCaptureReplayDescriptorDataSize_ }, imageViewCaptureReplayDescriptorDataSize{ imageViewCaptureReplayDescriptorDataSize_ }, samplerCaptureReplayDescriptorDataSize{ samplerCaptureReplayDescriptorDataSize_ }, accelerationStructureCaptureReplayDescriptorDataSize{ accelerationStructureCaptureReplayDescriptorDataSize_ }, samplerDescriptorSize{ samplerDescriptorSize_ }, combinedImageSamplerDescriptorSize{ combinedImageSamplerDescriptorSize_ }, sampledImageDescriptorSize{ sampledImageDescriptorSize_ }, storageImageDescriptorSize{ storageImageDescriptorSize_ }, uniformTexelBufferDescriptorSize{ uniformTexelBufferDescriptorSize_ }, robustUniformTexelBufferDescriptorSize{ robustUniformTexelBufferDescriptorSize_ }, storageTexelBufferDescriptorSize{ storageTexelBufferDescriptorSize_ }, robustStorageTexelBufferDescriptorSize{ robustStorageTexelBufferDescriptorSize_ }, uniformBufferDescriptorSize{ uniformBufferDescriptorSize_ }, robustUniformBufferDescriptorSize{ robustUniformBufferDescriptorSize_ }, storageBufferDescriptorSize{ storageBufferDescriptorSize_ }, robustStorageBufferDescriptorSize{ robustStorageBufferDescriptorSize_ }, inputAttachmentDescriptorSize{ inputAttachmentDescriptorSize_ }, accelerationStructureDescriptorSize{ accelerationStructureDescriptorSize_ }, maxSamplerDescriptorBufferRange{ maxSamplerDescriptorBufferRange_ }, maxResourceDescriptorBufferRange{ maxResourceDescriptorBufferRange_ }, samplerDescriptorBufferAddressSpaceSize{ samplerDescriptorBufferAddressSpaceSize_ }, resourceDescriptorBufferAddressSpaceSize{ resourceDescriptorBufferAddressSpaceSize_ }, descriptorBufferAddressSpaceSize{ descriptorBufferAddressSpaceSize_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorBufferPropertiesEXT( PhysicalDeviceDescriptorBufferPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceDescriptorBufferPropertiesEXT( VkPhysicalDeviceDescriptorBufferPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceDescriptorBufferPropertiesEXT( *reinterpret_cast<PhysicalDeviceDescriptorBufferPropertiesEXT const *>( &rhs ) )
{}
PhysicalDeviceDescriptorBufferPropertiesEXT & operator=( PhysicalDeviceDescriptorBufferPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceDescriptorBufferPropertiesEXT & operator=( VkPhysicalDeviceDescriptorBufferPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferPropertiesEXT const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceDescriptorBufferPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceDescriptorBufferPropertiesEXT*>( this );
}
operator VkPhysicalDeviceDescriptorBufferPropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceDescriptorBufferPropertiesEXT*>( this );
}
operator VkPhysicalDeviceDescriptorBufferPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceDescriptorBufferPropertiesEXT*>( this );
}
operator VkPhysicalDeviceDescriptorBufferPropertiesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceDescriptorBufferPropertiesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, size_t const &, size_t const &, size_t const &, size_t const &, size_t const &, size_t const &, size_t const &, size_t const &, size_t const &, size_t const &, size_t const &, size_t const &, size_t const &, size_t const &, size_t const &, size_t const &, size_t const &, size_t const &, size_t const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, combinedImageSamplerDescriptorSingleArray, bufferlessPushDescriptors, allowSamplerImageViewPostSubmitCreation, descriptorBufferOffsetAlignment, maxDescriptorBufferBindings, maxResourceDescriptorBufferBindings, maxSamplerDescriptorBufferBindings, maxEmbeddedImmutableSamplerBindings, maxEmbeddedImmutableSamplers, bufferCaptureReplayDescriptorDataSize, imageCaptureReplayDescriptorDataSize, imageViewCaptureReplayDescriptorDataSize, samplerCaptureReplayDescriptorDataSize, accelerationStructureCaptureReplayDescriptorDataSize, samplerDescriptorSize, combinedImageSamplerDescriptorSize, sampledImageDescriptorSize, storageImageDescriptorSize, uniformTexelBufferDescriptorSize, robustUniformTexelBufferDescriptorSize, storageTexelBufferDescriptorSize, robustStorageTexelBufferDescriptorSize, uniformBufferDescriptorSize, robustUniformBufferDescriptorSize, storageBufferDescriptorSize, robustStorageBufferDescriptorSize, inputAttachmentDescriptorSize, accelerationStructureDescriptorSize, maxSamplerDescriptorBufferRange, maxResourceDescriptorBufferRange, samplerDescriptorBufferAddressSpaceSize, resourceDescriptorBufferAddressSpaceSize, descriptorBufferAddressSpaceSize );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceDescriptorBufferPropertiesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceDescriptorBufferPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( combinedImageSamplerDescriptorSingleArray == rhs.combinedImageSamplerDescriptorSingleArray )
&& ( bufferlessPushDescriptors == rhs.bufferlessPushDescriptors )
&& ( allowSamplerImageViewPostSubmitCreation == rhs.allowSamplerImageViewPostSubmitCreation )
&& ( descriptorBufferOffsetAlignment == rhs.descriptorBufferOffsetAlignment )
&& ( maxDescriptorBufferBindings == rhs.maxDescriptorBufferBindings )
&& ( maxResourceDescriptorBufferBindings == rhs.maxResourceDescriptorBufferBindings )
&& ( maxSamplerDescriptorBufferBindings == rhs.maxSamplerDescriptorBufferBindings )
&& ( maxEmbeddedImmutableSamplerBindings == rhs.maxEmbeddedImmutableSamplerBindings )
&& ( maxEmbeddedImmutableSamplers == rhs.maxEmbeddedImmutableSamplers )
&& ( bufferCaptureReplayDescriptorDataSize == rhs.bufferCaptureReplayDescriptorDataSize )
&& ( imageCaptureReplayDescriptorDataSize == rhs.imageCaptureReplayDescriptorDataSize )
&& ( imageViewCaptureReplayDescriptorDataSize == rhs.imageViewCaptureReplayDescriptorDataSize )
&& ( samplerCaptureReplayDescriptorDataSize == rhs.samplerCaptureReplayDescriptorDataSize )
&& ( accelerationStructureCaptureReplayDescriptorDataSize == rhs.accelerationStructureCaptureReplayDescriptorDataSize )
&& ( samplerDescriptorSize == rhs.samplerDescriptorSize )
&& ( combinedImageSamplerDescriptorSize == rhs.combinedImageSamplerDescriptorSize )
&& ( sampledImageDescriptorSize == rhs.sampledImageDescriptorSize )
&& ( storageImageDescriptorSize == rhs.storageImageDescriptorSize )
&& ( uniformTexelBufferDescriptorSize == rhs.uniformTexelBufferDescriptorSize )
&& ( robustUniformTexelBufferDescriptorSize == rhs.robustUniformTexelBufferDescriptorSize )
&& ( storageTexelBufferDescriptorSize == rhs.storageTexelBufferDescriptorSize )
&& ( robustStorageTexelBufferDescriptorSize == rhs.robustStorageTexelBufferDescriptorSize )
&& ( uniformBufferDescriptorSize == rhs.uniformBufferDescriptorSize )
&& ( robustUniformBufferDescriptorSize == rhs.robustUniformBufferDescriptorSize )
&& ( storageBufferDescriptorSize == rhs.storageBufferDescriptorSize )
&& ( robustStorageBufferDescriptorSize == rhs.robustStorageBufferDescriptorSize )
&& ( inputAttachmentDescriptorSize == rhs.inputAttachmentDescriptorSize )
&& ( accelerationStructureDescriptorSize == rhs.accelerationStructureDescriptorSize )
&& ( maxSamplerDescriptorBufferRange == rhs.maxSamplerDescriptorBufferRange )
&& ( maxResourceDescriptorBufferRange == rhs.maxResourceDescriptorBufferRange )
&& ( samplerDescriptorBufferAddressSpaceSize == rhs.samplerDescriptorBufferAddressSpaceSize )
&& ( resourceDescriptorBufferAddressSpaceSize == rhs.resourceDescriptorBufferAddressSpaceSize )
&& ( descriptorBufferAddressSpaceSize == rhs.descriptorBufferAddressSpaceSize );
#endif
}
bool operator!=( PhysicalDeviceDescriptorBufferPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorBufferPropertiesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 combinedImageSamplerDescriptorSingleArray = {};
VULKAN_HPP_NAMESPACE::Bool32 bufferlessPushDescriptors = {};
VULKAN_HPP_NAMESPACE::Bool32 allowSamplerImageViewPostSubmitCreation = {};
VULKAN_HPP_NAMESPACE::DeviceSize descriptorBufferOffsetAlignment = {};
uint32_t maxDescriptorBufferBindings = {};
uint32_t maxResourceDescriptorBufferBindings = {};
uint32_t maxSamplerDescriptorBufferBindings = {};
uint32_t maxEmbeddedImmutableSamplerBindings = {};
uint32_t maxEmbeddedImmutableSamplers = {};
size_t bufferCaptureReplayDescriptorDataSize = {};
size_t imageCaptureReplayDescriptorDataSize = {};
size_t imageViewCaptureReplayDescriptorDataSize = {};
size_t samplerCaptureReplayDescriptorDataSize = {};
size_t accelerationStructureCaptureReplayDescriptorDataSize = {};
size_t samplerDescriptorSize = {};
size_t combinedImageSamplerDescriptorSize = {};
size_t sampledImageDescriptorSize = {};
size_t storageImageDescriptorSize = {};
size_t uniformTexelBufferDescriptorSize = {};
size_t robustUniformTexelBufferDescriptorSize = {};
size_t storageTexelBufferDescriptorSize = {};
size_t robustStorageTexelBufferDescriptorSize = {};
size_t uniformBufferDescriptorSize = {};
size_t robustUniformBufferDescriptorSize = {};
size_t storageBufferDescriptorSize = {};
size_t robustStorageBufferDescriptorSize = {};
size_t inputAttachmentDescriptorSize = {};
size_t accelerationStructureDescriptorSize = {};
VULKAN_HPP_NAMESPACE::DeviceSize maxSamplerDescriptorBufferRange = {};
VULKAN_HPP_NAMESPACE::DeviceSize maxResourceDescriptorBufferRange = {};
VULKAN_HPP_NAMESPACE::DeviceSize samplerDescriptorBufferAddressSpaceSize = {};
VULKAN_HPP_NAMESPACE::DeviceSize resourceDescriptorBufferAddressSpaceSize = {};
VULKAN_HPP_NAMESPACE::DeviceSize descriptorBufferAddressSpaceSize = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceDescriptorBufferPropertiesEXT>
{
using Type = PhysicalDeviceDescriptorBufferPropertiesEXT;
};
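  // Usage sketch (illustrative only): descriptor buffer binding offsets must respect
  // descriptorBufferOffsetAlignment (a power-of-two alignment), so an offset can be rounded up
  // with the usual bit trick. Assumes the default `vk` alias and a caller-provided
  // `physicalDevice`; `rawOffset` is a hypothetical unaligned vk::DeviceSize.
  //
  //   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
  //                                              vk::PhysicalDeviceDescriptorBufferPropertiesEXT>();
  //   auto const & dbProps   = chain.get<vk::PhysicalDeviceDescriptorBufferPropertiesEXT>();
  //   vk::DeviceSize aligned = ( rawOffset + dbProps.descriptorBufferOffsetAlignment - 1 ) &
  //                            ~( dbProps.descriptorBufferOffsetAlignment - 1 );
  //   // members such as dbProps.combinedImageSamplerDescriptorSize give the byte size to reserve
  //   // per descriptor when laying out a descriptor buffer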
// wrapper struct for struct VkPhysicalDeviceDescriptorBufferTensorFeaturesARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDescriptorBufferTensorFeaturesARM.html
struct PhysicalDeviceDescriptorBufferTensorFeaturesARM
{
using NativeType = VkPhysicalDeviceDescriptorBufferTensorFeaturesARM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorBufferTensorFeaturesARM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorBufferTensorFeaturesARM(VULKAN_HPP_NAMESPACE::Bool32 descriptorBufferTensorDescriptors_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, descriptorBufferTensorDescriptors{ descriptorBufferTensorDescriptors_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorBufferTensorFeaturesARM( PhysicalDeviceDescriptorBufferTensorFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceDescriptorBufferTensorFeaturesARM( VkPhysicalDeviceDescriptorBufferTensorFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceDescriptorBufferTensorFeaturesARM( *reinterpret_cast<PhysicalDeviceDescriptorBufferTensorFeaturesARM const *>( &rhs ) )
{}
PhysicalDeviceDescriptorBufferTensorFeaturesARM & operator=( PhysicalDeviceDescriptorBufferTensorFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceDescriptorBufferTensorFeaturesARM & operator=( VkPhysicalDeviceDescriptorBufferTensorFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferTensorFeaturesARM const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferTensorFeaturesARM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferTensorFeaturesARM & setDescriptorBufferTensorDescriptors( VULKAN_HPP_NAMESPACE::Bool32 descriptorBufferTensorDescriptors_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBufferTensorDescriptors = descriptorBufferTensorDescriptors_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceDescriptorBufferTensorFeaturesARM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceDescriptorBufferTensorFeaturesARM*>( this );
}
operator VkPhysicalDeviceDescriptorBufferTensorFeaturesARM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceDescriptorBufferTensorFeaturesARM*>( this );
}
operator VkPhysicalDeviceDescriptorBufferTensorFeaturesARM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceDescriptorBufferTensorFeaturesARM*>( this );
}
operator VkPhysicalDeviceDescriptorBufferTensorFeaturesARM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceDescriptorBufferTensorFeaturesARM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, descriptorBufferTensorDescriptors );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceDescriptorBufferTensorFeaturesARM const & ) const = default;
#else
bool operator==( PhysicalDeviceDescriptorBufferTensorFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( descriptorBufferTensorDescriptors == rhs.descriptorBufferTensorDescriptors );
#endif
}
bool operator!=( PhysicalDeviceDescriptorBufferTensorFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorBufferTensorFeaturesARM;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 descriptorBufferTensorDescriptors = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceDescriptorBufferTensorFeaturesARM>
{
using Type = PhysicalDeviceDescriptorBufferTensorFeaturesARM;
};
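  // Usage sketch (illustrative only): checking whether ARM tensor descriptors can be placed in
  // descriptor buffers. Assumes the default `vk` alias and a caller-provided `physicalDevice`.
  //
  //   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
  //                                            vk::PhysicalDeviceDescriptorBufferTensorFeaturesARM>();
  //   if ( chain.get<vk::PhysicalDeviceDescriptorBufferTensorFeaturesARM>().descriptorBufferTensorDescriptors )
  //   {
  //     // tensor descriptors may be written into descriptor buffers
  //   }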
// wrapper struct for struct VkPhysicalDeviceDescriptorBufferTensorPropertiesARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDescriptorBufferTensorPropertiesARM.html
struct PhysicalDeviceDescriptorBufferTensorPropertiesARM
{
using NativeType = VkPhysicalDeviceDescriptorBufferTensorPropertiesARM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorBufferTensorPropertiesARM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorBufferTensorPropertiesARM(size_t tensorCaptureReplayDescriptorDataSize_ = {}, size_t tensorViewCaptureReplayDescriptorDataSize_ = {}, size_t tensorDescriptorSize_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, tensorCaptureReplayDescriptorDataSize{ tensorCaptureReplayDescriptorDataSize_ }, tensorViewCaptureReplayDescriptorDataSize{ tensorViewCaptureReplayDescriptorDataSize_ }, tensorDescriptorSize{ tensorDescriptorSize_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorBufferTensorPropertiesARM( PhysicalDeviceDescriptorBufferTensorPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceDescriptorBufferTensorPropertiesARM( VkPhysicalDeviceDescriptorBufferTensorPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceDescriptorBufferTensorPropertiesARM( *reinterpret_cast<PhysicalDeviceDescriptorBufferTensorPropertiesARM const *>( &rhs ) )
{}
PhysicalDeviceDescriptorBufferTensorPropertiesARM & operator=( PhysicalDeviceDescriptorBufferTensorPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceDescriptorBufferTensorPropertiesARM & operator=( VkPhysicalDeviceDescriptorBufferTensorPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferTensorPropertiesARM const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferTensorPropertiesARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferTensorPropertiesARM & setTensorCaptureReplayDescriptorDataSize( size_t tensorCaptureReplayDescriptorDataSize_ ) VULKAN_HPP_NOEXCEPT
{
tensorCaptureReplayDescriptorDataSize = tensorCaptureReplayDescriptorDataSize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferTensorPropertiesARM & setTensorViewCaptureReplayDescriptorDataSize( size_t tensorViewCaptureReplayDescriptorDataSize_ ) VULKAN_HPP_NOEXCEPT
{
tensorViewCaptureReplayDescriptorDataSize = tensorViewCaptureReplayDescriptorDataSize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferTensorPropertiesARM & setTensorDescriptorSize( size_t tensorDescriptorSize_ ) VULKAN_HPP_NOEXCEPT
{
tensorDescriptorSize = tensorDescriptorSize_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceDescriptorBufferTensorPropertiesARM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceDescriptorBufferTensorPropertiesARM*>( this );
}
operator VkPhysicalDeviceDescriptorBufferTensorPropertiesARM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceDescriptorBufferTensorPropertiesARM*>( this );
}
operator VkPhysicalDeviceDescriptorBufferTensorPropertiesARM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceDescriptorBufferTensorPropertiesARM*>( this );
}
operator VkPhysicalDeviceDescriptorBufferTensorPropertiesARM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceDescriptorBufferTensorPropertiesARM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, size_t const &, size_t const &, size_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, tensorCaptureReplayDescriptorDataSize, tensorViewCaptureReplayDescriptorDataSize, tensorDescriptorSize );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceDescriptorBufferTensorPropertiesARM const & ) const = default;
#else
bool operator==( PhysicalDeviceDescriptorBufferTensorPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( tensorCaptureReplayDescriptorDataSize == rhs.tensorCaptureReplayDescriptorDataSize )
&& ( tensorViewCaptureReplayDescriptorDataSize == rhs.tensorViewCaptureReplayDescriptorDataSize )
&& ( tensorDescriptorSize == rhs.tensorDescriptorSize );
#endif
}
bool operator!=( PhysicalDeviceDescriptorBufferTensorPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorBufferTensorPropertiesARM;
const void * pNext = {};
size_t tensorCaptureReplayDescriptorDataSize = {};
size_t tensorViewCaptureReplayDescriptorDataSize = {};
size_t tensorDescriptorSize = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceDescriptorBufferTensorPropertiesARM>
{
using Type = PhysicalDeviceDescriptorBufferTensorPropertiesARM;
};
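// A minimal usage sketch, assuming a valid vk::PhysicalDevice named `physicalDevice`, the
// conventional vk:: namespace alias, and support for the corresponding ARM tensor extension:
//   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
//                                              vk::PhysicalDeviceDescriptorBufferTensorPropertiesARM>();
//   size_t tensorDescriptorSize =
//     chain.get<vk::PhysicalDeviceDescriptorBufferTensorPropertiesARM>().tensorDescriptorSize;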
// wrapper struct for struct VkPhysicalDeviceDescriptorIndexingFeatures, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDescriptorIndexingFeatures.html
struct PhysicalDeviceDescriptorIndexingFeatures
{
using NativeType = VkPhysicalDeviceDescriptorIndexingFeatures;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorIndexingFeatures;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingFeatures(VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ = {}, VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, shaderInputAttachmentArrayDynamicIndexing{ shaderInputAttachmentArrayDynamicIndexing_ }, shaderUniformTexelBufferArrayDynamicIndexing{ shaderUniformTexelBufferArrayDynamicIndexing_ }, shaderStorageTexelBufferArrayDynamicIndexing{ shaderStorageTexelBufferArrayDynamicIndexing_ }, shaderUniformBufferArrayNonUniformIndexing{ shaderUniformBufferArrayNonUniformIndexing_ }, shaderSampledImageArrayNonUniformIndexing{ shaderSampledImageArrayNonUniformIndexing_ }, shaderStorageBufferArrayNonUniformIndexing{ shaderStorageBufferArrayNonUniformIndexing_ }, shaderStorageImageArrayNonUniformIndexing{ shaderStorageImageArrayNonUniformIndexing_ }, shaderInputAttachmentArrayNonUniformIndexing{ shaderInputAttachmentArrayNonUniformIndexing_ }, shaderUniformTexelBufferArrayNonUniformIndexing{ shaderUniformTexelBufferArrayNonUniformIndexing_ }, shaderStorageTexelBufferArrayNonUniformIndexing{ shaderStorageTexelBufferArrayNonUniformIndexing_ }, descriptorBindingUniformBufferUpdateAfterBind{ descriptorBindingUniformBufferUpdateAfterBind_ }, descriptorBindingSampledImageUpdateAfterBind{ descriptorBindingSampledImageUpdateAfterBind_ }, descriptorBindingStorageImageUpdateAfterBind{ descriptorBindingStorageImageUpdateAfterBind_ }, descriptorBindingStorageBufferUpdateAfterBind{ descriptorBindingStorageBufferUpdateAfterBind_ }, descriptorBindingUniformTexelBufferUpdateAfterBind{ descriptorBindingUniformTexelBufferUpdateAfterBind_ }, descriptorBindingStorageTexelBufferUpdateAfterBind{ descriptorBindingStorageTexelBufferUpdateAfterBind_ }, descriptorBindingUpdateUnusedWhilePending{ descriptorBindingUpdateUnusedWhilePending_ }, descriptorBindingPartiallyBound{ descriptorBindingPartiallyBound_ }, descriptorBindingVariableDescriptorCount{ descriptorBindingVariableDescriptorCount_ }, runtimeDescriptorArray{ runtimeDescriptorArray_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingFeatures( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceDescriptorIndexingFeatures( VkPhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceDescriptorIndexingFeatures( *reinterpret_cast<PhysicalDeviceDescriptorIndexingFeatures const *>( &rhs ) )
{}
PhysicalDeviceDescriptorIndexingFeatures & operator=( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceDescriptorIndexingFeatures & operator=( VkPhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeatures const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderInputAttachmentArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderInputAttachmentArrayDynamicIndexing = shaderInputAttachmentArrayDynamicIndexing_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderUniformTexelBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderUniformTexelBufferArrayDynamicIndexing = shaderUniformTexelBufferArrayDynamicIndexing_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderStorageTexelBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderStorageTexelBufferArrayDynamicIndexing = shaderStorageTexelBufferArrayDynamicIndexing_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderUniformBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderUniformBufferArrayNonUniformIndexing = shaderUniformBufferArrayNonUniformIndexing_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderSampledImageArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderSampledImageArrayNonUniformIndexing = shaderSampledImageArrayNonUniformIndexing_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderStorageBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderStorageBufferArrayNonUniformIndexing = shaderStorageBufferArrayNonUniformIndexing_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderStorageImageArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderStorageImageArrayNonUniformIndexing = shaderStorageImageArrayNonUniformIndexing_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderInputAttachmentArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderInputAttachmentArrayNonUniformIndexing = shaderInputAttachmentArrayNonUniformIndexing_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderUniformTexelBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderUniformTexelBufferArrayNonUniformIndexing = shaderUniformTexelBufferArrayNonUniformIndexing_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderStorageTexelBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderStorageTexelBufferArrayNonUniformIndexing = shaderStorageTexelBufferArrayNonUniformIndexing_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingUniformBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBindingUniformBufferUpdateAfterBind = descriptorBindingUniformBufferUpdateAfterBind_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingSampledImageUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBindingSampledImageUpdateAfterBind = descriptorBindingSampledImageUpdateAfterBind_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingStorageImageUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBindingStorageImageUpdateAfterBind = descriptorBindingStorageImageUpdateAfterBind_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingStorageBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBindingStorageBufferUpdateAfterBind = descriptorBindingStorageBufferUpdateAfterBind_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingUniformTexelBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBindingUniformTexelBufferUpdateAfterBind = descriptorBindingUniformTexelBufferUpdateAfterBind_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingStorageTexelBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBindingStorageTexelBufferUpdateAfterBind = descriptorBindingStorageTexelBufferUpdateAfterBind_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingUpdateUnusedWhilePending( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBindingUpdateUnusedWhilePending = descriptorBindingUpdateUnusedWhilePending_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingPartiallyBound( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBindingPartiallyBound = descriptorBindingPartiallyBound_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingVariableDescriptorCount( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBindingVariableDescriptorCount = descriptorBindingVariableDescriptorCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setRuntimeDescriptorArray( VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ ) VULKAN_HPP_NOEXCEPT
{
runtimeDescriptorArray = runtimeDescriptorArray_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceDescriptorIndexingFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceDescriptorIndexingFeatures*>( this );
}
operator VkPhysicalDeviceDescriptorIndexingFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceDescriptorIndexingFeatures*>( this );
}
operator VkPhysicalDeviceDescriptorIndexingFeatures const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceDescriptorIndexingFeatures*>( this );
}
operator VkPhysicalDeviceDescriptorIndexingFeatures *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceDescriptorIndexingFeatures*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, shaderInputAttachmentArrayDynamicIndexing, shaderUniformTexelBufferArrayDynamicIndexing, shaderStorageTexelBufferArrayDynamicIndexing, shaderUniformBufferArrayNonUniformIndexing, shaderSampledImageArrayNonUniformIndexing, shaderStorageBufferArrayNonUniformIndexing, shaderStorageImageArrayNonUniformIndexing, shaderInputAttachmentArrayNonUniformIndexing, shaderUniformTexelBufferArrayNonUniformIndexing, shaderStorageTexelBufferArrayNonUniformIndexing, descriptorBindingUniformBufferUpdateAfterBind, descriptorBindingSampledImageUpdateAfterBind, descriptorBindingStorageImageUpdateAfterBind, descriptorBindingStorageBufferUpdateAfterBind, descriptorBindingUniformTexelBufferUpdateAfterBind, descriptorBindingStorageTexelBufferUpdateAfterBind, descriptorBindingUpdateUnusedWhilePending, descriptorBindingPartiallyBound, descriptorBindingVariableDescriptorCount, runtimeDescriptorArray );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceDescriptorIndexingFeatures const & ) const = default;
#else
bool operator==( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( shaderInputAttachmentArrayDynamicIndexing == rhs.shaderInputAttachmentArrayDynamicIndexing )
&& ( shaderUniformTexelBufferArrayDynamicIndexing == rhs.shaderUniformTexelBufferArrayDynamicIndexing )
&& ( shaderStorageTexelBufferArrayDynamicIndexing == rhs.shaderStorageTexelBufferArrayDynamicIndexing )
&& ( shaderUniformBufferArrayNonUniformIndexing == rhs.shaderUniformBufferArrayNonUniformIndexing )
&& ( shaderSampledImageArrayNonUniformIndexing == rhs.shaderSampledImageArrayNonUniformIndexing )
&& ( shaderStorageBufferArrayNonUniformIndexing == rhs.shaderStorageBufferArrayNonUniformIndexing )
&& ( shaderStorageImageArrayNonUniformIndexing == rhs.shaderStorageImageArrayNonUniformIndexing )
&& ( shaderInputAttachmentArrayNonUniformIndexing == rhs.shaderInputAttachmentArrayNonUniformIndexing )
&& ( shaderUniformTexelBufferArrayNonUniformIndexing == rhs.shaderUniformTexelBufferArrayNonUniformIndexing )
&& ( shaderStorageTexelBufferArrayNonUniformIndexing == rhs.shaderStorageTexelBufferArrayNonUniformIndexing )
&& ( descriptorBindingUniformBufferUpdateAfterBind == rhs.descriptorBindingUniformBufferUpdateAfterBind )
&& ( descriptorBindingSampledImageUpdateAfterBind == rhs.descriptorBindingSampledImageUpdateAfterBind )
&& ( descriptorBindingStorageImageUpdateAfterBind == rhs.descriptorBindingStorageImageUpdateAfterBind )
&& ( descriptorBindingStorageBufferUpdateAfterBind == rhs.descriptorBindingStorageBufferUpdateAfterBind )
&& ( descriptorBindingUniformTexelBufferUpdateAfterBind == rhs.descriptorBindingUniformTexelBufferUpdateAfterBind )
&& ( descriptorBindingStorageTexelBufferUpdateAfterBind == rhs.descriptorBindingStorageTexelBufferUpdateAfterBind )
&& ( descriptorBindingUpdateUnusedWhilePending == rhs.descriptorBindingUpdateUnusedWhilePending )
&& ( descriptorBindingPartiallyBound == rhs.descriptorBindingPartiallyBound )
&& ( descriptorBindingVariableDescriptorCount == rhs.descriptorBindingVariableDescriptorCount )
&& ( runtimeDescriptorArray == rhs.runtimeDescriptorArray );
#endif
}
bool operator!=( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingFeatures;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing = {};
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind = {};
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind = {};
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind = {};
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind = {};
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind = {};
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind = {};
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending = {};
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound = {};
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount = {};
VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceDescriptorIndexingFeatures>
{
using Type = PhysicalDeviceDescriptorIndexingFeatures;
};
using PhysicalDeviceDescriptorIndexingFeaturesEXT = PhysicalDeviceDescriptorIndexingFeatures;
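// A minimal usage sketch for enabling a subset of these features at device creation, assuming a
// valid vk::PhysicalDevice named `physicalDevice` whose implementation reports them as supported;
// the setters chain because each returns *this:
//   vk::PhysicalDeviceDescriptorIndexingFeatures indexingFeatures;
//   indexingFeatures.setRuntimeDescriptorArray( VK_TRUE ).setDescriptorBindingPartiallyBound( VK_TRUE );
//   vk::DeviceCreateInfo deviceCreateInfo;   // queue create infos etc. assumed to be filled in elsewhere
//   deviceCreateInfo.setPNext( &indexingFeatures );
//   vk::Device device = physicalDevice.createDevice( deviceCreateInfo );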
// wrapper struct for struct VkPhysicalDeviceDescriptorIndexingProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDescriptorIndexingProperties.html
struct PhysicalDeviceDescriptorIndexingProperties
{
using NativeType = VkPhysicalDeviceDescriptorIndexingProperties;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorIndexingProperties;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingProperties(uint32_t maxUpdateAfterBindDescriptorsInAllPools_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindSamplers_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments_ = {}, uint32_t maxPerStageUpdateAfterBindResources_ = {}, uint32_t maxDescriptorSetUpdateAfterBindSamplers_ = {}, uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers_ = {}, uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ = {}, uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers_ = {}, uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ = {}, uint32_t maxDescriptorSetUpdateAfterBindSampledImages_ = {}, uint32_t maxDescriptorSetUpdateAfterBindStorageImages_ = {}, uint32_t maxDescriptorSetUpdateAfterBindInputAttachments_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, maxUpdateAfterBindDescriptorsInAllPools{ maxUpdateAfterBindDescriptorsInAllPools_ }, shaderUniformBufferArrayNonUniformIndexingNative{ shaderUniformBufferArrayNonUniformIndexingNative_ }, shaderSampledImageArrayNonUniformIndexingNative{ shaderSampledImageArrayNonUniformIndexingNative_ }, shaderStorageBufferArrayNonUniformIndexingNative{ shaderStorageBufferArrayNonUniformIndexingNative_ }, shaderStorageImageArrayNonUniformIndexingNative{ shaderStorageImageArrayNonUniformIndexingNative_ }, shaderInputAttachmentArrayNonUniformIndexingNative{ shaderInputAttachmentArrayNonUniformIndexingNative_ }, robustBufferAccessUpdateAfterBind{ robustBufferAccessUpdateAfterBind_ }, quadDivergentImplicitLod{ quadDivergentImplicitLod_ }, maxPerStageDescriptorUpdateAfterBindSamplers{ maxPerStageDescriptorUpdateAfterBindSamplers_ }, maxPerStageDescriptorUpdateAfterBindUniformBuffers{ maxPerStageDescriptorUpdateAfterBindUniformBuffers_ }, maxPerStageDescriptorUpdateAfterBindStorageBuffers{ maxPerStageDescriptorUpdateAfterBindStorageBuffers_ }, maxPerStageDescriptorUpdateAfterBindSampledImages{ maxPerStageDescriptorUpdateAfterBindSampledImages_ }, maxPerStageDescriptorUpdateAfterBindStorageImages{ maxPerStageDescriptorUpdateAfterBindStorageImages_ }, maxPerStageDescriptorUpdateAfterBindInputAttachments{ maxPerStageDescriptorUpdateAfterBindInputAttachments_ }, maxPerStageUpdateAfterBindResources{ maxPerStageUpdateAfterBindResources_ }, maxDescriptorSetUpdateAfterBindSamplers{ maxDescriptorSetUpdateAfterBindSamplers_ }, maxDescriptorSetUpdateAfterBindUniformBuffers{ maxDescriptorSetUpdateAfterBindUniformBuffers_ }, maxDescriptorSetUpdateAfterBindUniformBuffersDynamic{ maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ }, maxDescriptorSetUpdateAfterBindStorageBuffers{ maxDescriptorSetUpdateAfterBindStorageBuffers_ }, maxDescriptorSetUpdateAfterBindStorageBuffersDynamic{ maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ }, maxDescriptorSetUpdateAfterBindSampledImages{ maxDescriptorSetUpdateAfterBindSampledImages_ }, maxDescriptorSetUpdateAfterBindStorageImages{ maxDescriptorSetUpdateAfterBindStorageImages_ }, maxDescriptorSetUpdateAfterBindInputAttachments{ maxDescriptorSetUpdateAfterBindInputAttachments_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingProperties( PhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceDescriptorIndexingProperties( VkPhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceDescriptorIndexingProperties( *reinterpret_cast<PhysicalDeviceDescriptorIndexingProperties const *>( &rhs ) )
{}
PhysicalDeviceDescriptorIndexingProperties & operator=( PhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceDescriptorIndexingProperties & operator=( VkPhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingProperties const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceDescriptorIndexingProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceDescriptorIndexingProperties*>( this );
}
operator VkPhysicalDeviceDescriptorIndexingProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceDescriptorIndexingProperties*>( this );
}
operator VkPhysicalDeviceDescriptorIndexingProperties const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceDescriptorIndexingProperties*>( this );
}
operator VkPhysicalDeviceDescriptorIndexingProperties *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceDescriptorIndexingProperties*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, maxUpdateAfterBindDescriptorsInAllPools, shaderUniformBufferArrayNonUniformIndexingNative, shaderSampledImageArrayNonUniformIndexingNative, shaderStorageBufferArrayNonUniformIndexingNative, shaderStorageImageArrayNonUniformIndexingNative, shaderInputAttachmentArrayNonUniformIndexingNative, robustBufferAccessUpdateAfterBind, quadDivergentImplicitLod, maxPerStageDescriptorUpdateAfterBindSamplers, maxPerStageDescriptorUpdateAfterBindUniformBuffers, maxPerStageDescriptorUpdateAfterBindStorageBuffers, maxPerStageDescriptorUpdateAfterBindSampledImages, maxPerStageDescriptorUpdateAfterBindStorageImages, maxPerStageDescriptorUpdateAfterBindInputAttachments, maxPerStageUpdateAfterBindResources, maxDescriptorSetUpdateAfterBindSamplers, maxDescriptorSetUpdateAfterBindUniformBuffers, maxDescriptorSetUpdateAfterBindUniformBuffersDynamic, maxDescriptorSetUpdateAfterBindStorageBuffers, maxDescriptorSetUpdateAfterBindStorageBuffersDynamic, maxDescriptorSetUpdateAfterBindSampledImages, maxDescriptorSetUpdateAfterBindStorageImages, maxDescriptorSetUpdateAfterBindInputAttachments );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceDescriptorIndexingProperties const & ) const = default;
#else
bool operator==( PhysicalDeviceDescriptorIndexingProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( maxUpdateAfterBindDescriptorsInAllPools == rhs.maxUpdateAfterBindDescriptorsInAllPools )
&& ( shaderUniformBufferArrayNonUniformIndexingNative == rhs.shaderUniformBufferArrayNonUniformIndexingNative )
&& ( shaderSampledImageArrayNonUniformIndexingNative == rhs.shaderSampledImageArrayNonUniformIndexingNative )
&& ( shaderStorageBufferArrayNonUniformIndexingNative == rhs.shaderStorageBufferArrayNonUniformIndexingNative )
&& ( shaderStorageImageArrayNonUniformIndexingNative == rhs.shaderStorageImageArrayNonUniformIndexingNative )
&& ( shaderInputAttachmentArrayNonUniformIndexingNative == rhs.shaderInputAttachmentArrayNonUniformIndexingNative )
&& ( robustBufferAccessUpdateAfterBind == rhs.robustBufferAccessUpdateAfterBind )
&& ( quadDivergentImplicitLod == rhs.quadDivergentImplicitLod )
&& ( maxPerStageDescriptorUpdateAfterBindSamplers == rhs.maxPerStageDescriptorUpdateAfterBindSamplers )
&& ( maxPerStageDescriptorUpdateAfterBindUniformBuffers == rhs.maxPerStageDescriptorUpdateAfterBindUniformBuffers )
&& ( maxPerStageDescriptorUpdateAfterBindStorageBuffers == rhs.maxPerStageDescriptorUpdateAfterBindStorageBuffers )
&& ( maxPerStageDescriptorUpdateAfterBindSampledImages == rhs.maxPerStageDescriptorUpdateAfterBindSampledImages )
&& ( maxPerStageDescriptorUpdateAfterBindStorageImages == rhs.maxPerStageDescriptorUpdateAfterBindStorageImages )
&& ( maxPerStageDescriptorUpdateAfterBindInputAttachments == rhs.maxPerStageDescriptorUpdateAfterBindInputAttachments )
&& ( maxPerStageUpdateAfterBindResources == rhs.maxPerStageUpdateAfterBindResources )
&& ( maxDescriptorSetUpdateAfterBindSamplers == rhs.maxDescriptorSetUpdateAfterBindSamplers )
&& ( maxDescriptorSetUpdateAfterBindUniformBuffers == rhs.maxDescriptorSetUpdateAfterBindUniformBuffers )
&& ( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic )
&& ( maxDescriptorSetUpdateAfterBindStorageBuffers == rhs.maxDescriptorSetUpdateAfterBindStorageBuffers )
&& ( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic )
&& ( maxDescriptorSetUpdateAfterBindSampledImages == rhs.maxDescriptorSetUpdateAfterBindSampledImages )
&& ( maxDescriptorSetUpdateAfterBindStorageImages == rhs.maxDescriptorSetUpdateAfterBindStorageImages )
&& ( maxDescriptorSetUpdateAfterBindInputAttachments == rhs.maxDescriptorSetUpdateAfterBindInputAttachments );
#endif
}
bool operator!=( PhysicalDeviceDescriptorIndexingProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingProperties;
void * pNext = {};
uint32_t maxUpdateAfterBindDescriptorsInAllPools = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative = {};
VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind = {};
VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod = {};
uint32_t maxPerStageDescriptorUpdateAfterBindSamplers = {};
uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers = {};
uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers = {};
uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages = {};
uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages = {};
uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments = {};
uint32_t maxPerStageUpdateAfterBindResources = {};
uint32_t maxDescriptorSetUpdateAfterBindSamplers = {};
uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers = {};
uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = {};
uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers = {};
uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = {};
uint32_t maxDescriptorSetUpdateAfterBindSampledImages = {};
uint32_t maxDescriptorSetUpdateAfterBindStorageImages = {};
uint32_t maxDescriptorSetUpdateAfterBindInputAttachments = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceDescriptorIndexingProperties>
{
using Type = PhysicalDeviceDescriptorIndexingProperties;
};
using PhysicalDeviceDescriptorIndexingPropertiesEXT = PhysicalDeviceDescriptorIndexingProperties;
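// Unlike the features struct above, this properties struct exposes no setters: it is only filled
// in by the implementation. A minimal query sketch, assuming a valid vk::PhysicalDevice named
// `physicalDevice`:
//   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
//                                              vk::PhysicalDeviceDescriptorIndexingProperties>();
//   uint32_t maxSampledImages =
//     chain.get<vk::PhysicalDeviceDescriptorIndexingProperties>().maxDescriptorSetUpdateAfterBindSampledImages;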
// wrapper struct for struct VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV.html
struct PhysicalDeviceDescriptorPoolOverallocationFeaturesNV
{
using NativeType = VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorPoolOverallocationFeaturesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorPoolOverallocationFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 descriptorPoolOverallocation_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, descriptorPoolOverallocation{ descriptorPoolOverallocation_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorPoolOverallocationFeaturesNV( PhysicalDeviceDescriptorPoolOverallocationFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceDescriptorPoolOverallocationFeaturesNV( VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceDescriptorPoolOverallocationFeaturesNV( *reinterpret_cast<PhysicalDeviceDescriptorPoolOverallocationFeaturesNV const *>( &rhs ) )
{}
PhysicalDeviceDescriptorPoolOverallocationFeaturesNV & operator=( PhysicalDeviceDescriptorPoolOverallocationFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceDescriptorPoolOverallocationFeaturesNV & operator=( VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorPoolOverallocationFeaturesNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorPoolOverallocationFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorPoolOverallocationFeaturesNV & setDescriptorPoolOverallocation( VULKAN_HPP_NAMESPACE::Bool32 descriptorPoolOverallocation_ ) VULKAN_HPP_NOEXCEPT
{
descriptorPoolOverallocation = descriptorPoolOverallocation_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV*>( this );
}
operator VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV*>( this );
}
operator VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV*>( this );
}
operator VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, descriptorPoolOverallocation );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceDescriptorPoolOverallocationFeaturesNV const & ) const = default;
#else
bool operator==( PhysicalDeviceDescriptorPoolOverallocationFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( descriptorPoolOverallocation == rhs.descriptorPoolOverallocation );
#endif
}
bool operator!=( PhysicalDeviceDescriptorPoolOverallocationFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorPoolOverallocationFeaturesNV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 descriptorPoolOverallocation = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceDescriptorPoolOverallocationFeaturesNV>
{
using Type = PhysicalDeviceDescriptorPoolOverallocationFeaturesNV;
};
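// A minimal support check, assuming a valid vk::PhysicalDevice named `physicalDevice` with the
// NV extension available:
//   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
//                                            vk::PhysicalDeviceDescriptorPoolOverallocationFeaturesNV>();
//   bool overallocationSupported =
//     chain.get<vk::PhysicalDeviceDescriptorPoolOverallocationFeaturesNV>().descriptorPoolOverallocation == VK_TRUE;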
// wrapper struct for struct VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE.html
struct PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE
{
using NativeType = VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorSetHostMappingFeaturesVALVE;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE(VULKAN_HPP_NAMESPACE::Bool32 descriptorSetHostMapping_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, descriptorSetHostMapping{ descriptorSetHostMapping_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE( PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE( VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE( *reinterpret_cast<PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const *>( &rhs ) )
{}
PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE & operator=( PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE & operator=( VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE & setDescriptorSetHostMapping( VULKAN_HPP_NAMESPACE::Bool32 descriptorSetHostMapping_ ) VULKAN_HPP_NOEXCEPT
{
descriptorSetHostMapping = descriptorSetHostMapping_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE*>( this );
}
operator VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE*>( this );
}
operator VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE*>( this );
}
operator VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, descriptorSetHostMapping );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & ) const = default;
#else
bool operator==( PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( descriptorSetHostMapping == rhs.descriptorSetHostMapping );
#endif
}
bool operator!=( PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorSetHostMappingFeaturesVALVE;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 descriptorSetHostMapping = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceDescriptorSetHostMappingFeaturesVALVE>
{
using Type = PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE;
};
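// The four conversion operators above let the wrapper pass directly to C entry points and convert
// back. A minimal sketch (`features` is a value of this wrapper type, assumed populated elsewhere):
//   VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE native = features;  // via operator VkXxx const &()
//   features = native;                                                        // via operator=( VkXxx const & )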
// wrapper struct for struct VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV.html
struct PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV
{
using NativeType = VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCompute_ = {}, VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedComputePipelines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedComputeCaptureReplay_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, deviceGeneratedCompute{ deviceGeneratedCompute_ }, deviceGeneratedComputePipelines{ deviceGeneratedComputePipelines_ }, deviceGeneratedComputeCaptureReplay{ deviceGeneratedComputeCaptureReplay_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV( PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV( VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV( *reinterpret_cast<PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const *>( &rhs ) )
{}
PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV & operator=( PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV & operator=( VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV & setDeviceGeneratedCompute( VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCompute_ ) VULKAN_HPP_NOEXCEPT
{
deviceGeneratedCompute = deviceGeneratedCompute_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV & setDeviceGeneratedComputePipelines( VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedComputePipelines_ ) VULKAN_HPP_NOEXCEPT
{
deviceGeneratedComputePipelines = deviceGeneratedComputePipelines_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV & setDeviceGeneratedComputeCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedComputeCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
{
deviceGeneratedComputeCaptureReplay = deviceGeneratedComputeCaptureReplay_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV*>( this );
}
operator VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV*>( this );
}
operator VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV*>( this );
}
operator VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, deviceGeneratedCompute, deviceGeneratedComputePipelines, deviceGeneratedComputeCaptureReplay );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const & ) const = default;
#else
bool operator==( PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( deviceGeneratedCompute == rhs.deviceGeneratedCompute )
&& ( deviceGeneratedComputePipelines == rhs.deviceGeneratedComputePipelines )
&& ( deviceGeneratedComputeCaptureReplay == rhs.deviceGeneratedComputeCaptureReplay );
#endif
}
bool operator!=( PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCompute = {};
VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedComputePipelines = {};
VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedComputeCaptureReplay = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV>
{
using Type = PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV;
};
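// The constructor takes the feature flags in declaration order with pNext last, and equality is
// memberwise (via operator<=> where available, otherwise the operator== fallback). A sketch:
//   vk::PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV a( VK_TRUE, VK_TRUE, VK_FALSE );
//   vk::PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV b = a;
//   assert( a == b );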
// wrapper struct for struct VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT.html
struct PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT
{
using NativeType = VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands_ = {}, VULKAN_HPP_NAMESPACE::Bool32 dynamicGeneratedPipelineLayout_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, deviceGeneratedCommands{ deviceGeneratedCommands_ }, dynamicGeneratedPipelineLayout{ dynamicGeneratedPipelineLayout_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT( PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT( *reinterpret_cast<PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT & operator=( PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT & operator=( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT & setDeviceGeneratedCommands( VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands_ ) VULKAN_HPP_NOEXCEPT
{
deviceGeneratedCommands = deviceGeneratedCommands_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT & setDynamicGeneratedPipelineLayout( VULKAN_HPP_NAMESPACE::Bool32 dynamicGeneratedPipelineLayout_ ) VULKAN_HPP_NOEXCEPT
{
dynamicGeneratedPipelineLayout = dynamicGeneratedPipelineLayout_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT*>( this );
}
operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT*>( this );
}
operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT*>( this );
}
operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, deviceGeneratedCommands, dynamicGeneratedPipelineLayout );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( deviceGeneratedCommands == rhs.deviceGeneratedCommands )
&& ( dynamicGeneratedPipelineLayout == rhs.dynamicGeneratedPipelineLayout );
#endif
}
bool operator!=( PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands = {};
VULKAN_HPP_NAMESPACE::Bool32 dynamicGeneratedPipelineLayout = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesEXT>
{
using Type = PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT;
};
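// Feature structs like this one can extend vk::DeviceCreateInfo, so a vk::StructureChain can
// build the pNext chain and keep both structs alive together. A minimal sketch, assuming
// `physicalDevice` is valid and the EXT extension is enabled:
//   vk::StructureChain<vk::DeviceCreateInfo, vk::PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT> chain;
//   chain.get<vk::PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT>()
//     .setDeviceGeneratedCommands( VK_TRUE )
//     .setDynamicGeneratedPipelineLayout( VK_TRUE );
//   vk::Device device = physicalDevice.createDevice( chain.get<vk::DeviceCreateInfo>() );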
// wrapper struct for struct VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV.html
struct PhysicalDeviceDeviceGeneratedCommandsFeaturesNV
{
using NativeType = VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, deviceGeneratedCommands{ deviceGeneratedCommands_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( *reinterpret_cast<PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const *>( &rhs ) )
{}
PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & operator=( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & operator=( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & setDeviceGeneratedCommands( VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands_ ) VULKAN_HPP_NOEXCEPT
{
deviceGeneratedCommands = deviceGeneratedCommands_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV*>( this );
}
operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV*>( this );
}
operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV*>( this );
}
operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, deviceGeneratedCommands );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & ) const = default;
#else
bool operator==( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( deviceGeneratedCommands == rhs.deviceGeneratedCommands );
#endif
}
bool operator!=( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV>
{
using Type = PhysicalDeviceDeviceGeneratedCommandsFeaturesNV;
};
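// The CppType specialization above maps the StructureType enumerant back to this wrapper type,
// which generic code can exploit, e.g.:
//   static_assert( std::is_same<vk::CppType<vk::StructureType,
//                                           vk::StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV>::Type,
//                               vk::PhysicalDeviceDeviceGeneratedCommandsFeaturesNV>::value,
//                  "CppType maps the enumerant to the wrapper" );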
// wrapper struct for struct VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT.html
struct PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT
{
using NativeType = VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT(uint32_t maxIndirectPipelineCount_ = {}, uint32_t maxIndirectShaderObjectCount_ = {}, uint32_t maxIndirectSequenceCount_ = {}, uint32_t maxIndirectCommandsTokenCount_ = {}, uint32_t maxIndirectCommandsTokenOffset_ = {}, uint32_t maxIndirectCommandsIndirectStride_ = {}, VULKAN_HPP_NAMESPACE::IndirectCommandsInputModeFlagsEXT supportedIndirectCommandsInputModes_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedIndirectCommandsShaderStages_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedIndirectCommandsShaderStagesPipelineBinding_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedIndirectCommandsShaderStagesShaderBinding_ = {}, VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommandsTransformFeedback_ = {}, VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommandsMultiDrawIndirectCount_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, maxIndirectPipelineCount{ maxIndirectPipelineCount_ }, maxIndirectShaderObjectCount{ maxIndirectShaderObjectCount_ }, maxIndirectSequenceCount{ maxIndirectSequenceCount_ }, maxIndirectCommandsTokenCount{ maxIndirectCommandsTokenCount_ }, maxIndirectCommandsTokenOffset{ maxIndirectCommandsTokenOffset_ }, maxIndirectCommandsIndirectStride{ maxIndirectCommandsIndirectStride_ }, supportedIndirectCommandsInputModes{ supportedIndirectCommandsInputModes_ }, supportedIndirectCommandsShaderStages{ supportedIndirectCommandsShaderStages_ }, supportedIndirectCommandsShaderStagesPipelineBinding{ supportedIndirectCommandsShaderStagesPipelineBinding_ }, supportedIndirectCommandsShaderStagesShaderBinding{ supportedIndirectCommandsShaderStagesShaderBinding_ }, deviceGeneratedCommandsTransformFeedback{ deviceGeneratedCommandsTransformFeedback_ }, deviceGeneratedCommandsMultiDrawIndirectCount{ deviceGeneratedCommandsMultiDrawIndirectCount_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT( PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT( *reinterpret_cast<PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT const *>( &rhs ) )
{}
PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT & operator=( PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT & operator=( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT*>( this );
}
operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT*>( this );
}
operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT*>( this );
}
operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::IndirectCommandsInputModeFlagsEXT const &, VULKAN_HPP_NAMESPACE::ShaderStageFlags const &, VULKAN_HPP_NAMESPACE::ShaderStageFlags const &, VULKAN_HPP_NAMESPACE::ShaderStageFlags const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, maxIndirectPipelineCount, maxIndirectShaderObjectCount, maxIndirectSequenceCount, maxIndirectCommandsTokenCount, maxIndirectCommandsTokenOffset, maxIndirectCommandsIndirectStride, supportedIndirectCommandsInputModes, supportedIndirectCommandsShaderStages, supportedIndirectCommandsShaderStagesPipelineBinding, supportedIndirectCommandsShaderStagesShaderBinding, deviceGeneratedCommandsTransformFeedback, deviceGeneratedCommandsMultiDrawIndirectCount );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( maxIndirectPipelineCount == rhs.maxIndirectPipelineCount )
&& ( maxIndirectShaderObjectCount == rhs.maxIndirectShaderObjectCount )
&& ( maxIndirectSequenceCount == rhs.maxIndirectSequenceCount )
&& ( maxIndirectCommandsTokenCount == rhs.maxIndirectCommandsTokenCount )
&& ( maxIndirectCommandsTokenOffset == rhs.maxIndirectCommandsTokenOffset )
&& ( maxIndirectCommandsIndirectStride == rhs.maxIndirectCommandsIndirectStride )
&& ( supportedIndirectCommandsInputModes == rhs.supportedIndirectCommandsInputModes )
&& ( supportedIndirectCommandsShaderStages == rhs.supportedIndirectCommandsShaderStages )
&& ( supportedIndirectCommandsShaderStagesPipelineBinding == rhs.supportedIndirectCommandsShaderStagesPipelineBinding )
&& ( supportedIndirectCommandsShaderStagesShaderBinding == rhs.supportedIndirectCommandsShaderStagesShaderBinding )
&& ( deviceGeneratedCommandsTransformFeedback == rhs.deviceGeneratedCommandsTransformFeedback )
&& ( deviceGeneratedCommandsMultiDrawIndirectCount == rhs.deviceGeneratedCommandsMultiDrawIndirectCount );
#endif
}
bool operator!=( PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesEXT;
void * pNext = {};
uint32_t maxIndirectPipelineCount = {};
uint32_t maxIndirectShaderObjectCount = {};
uint32_t maxIndirectSequenceCount = {};
uint32_t maxIndirectCommandsTokenCount = {};
uint32_t maxIndirectCommandsTokenOffset = {};
uint32_t maxIndirectCommandsIndirectStride = {};
VULKAN_HPP_NAMESPACE::IndirectCommandsInputModeFlagsEXT supportedIndirectCommandsInputModes = {};
VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedIndirectCommandsShaderStages = {};
VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedIndirectCommandsShaderStagesPipelineBinding = {};
VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedIndirectCommandsShaderStagesShaderBinding = {};
VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommandsTransformFeedback = {};
VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommandsMultiDrawIndirectCount = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesEXT>
{
using Type = PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT;
};
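// Usage sketch (illustrative): property structs such as the one above are read back the same
// way via PhysicalDeviceProperties2, after which the device limits are populated.
//   VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT dgcProps;
//   VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 props2;
//   props2.pNext = &dgcProps;
//   physicalDevice.getProperties2( &props2 );
//   uint32_t maxTokens = dgcProps.maxIndirectCommandsTokenCount;  // now a valid device limit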
// wrapper struct for struct VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV.html
struct PhysicalDeviceDeviceGeneratedCommandsPropertiesNV
{
using NativeType = VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsPropertiesNV(uint32_t maxGraphicsShaderGroupCount_ = {}, uint32_t maxIndirectSequenceCount_ = {}, uint32_t maxIndirectCommandsTokenCount_ = {}, uint32_t maxIndirectCommandsStreamCount_ = {}, uint32_t maxIndirectCommandsTokenOffset_ = {}, uint32_t maxIndirectCommandsStreamStride_ = {}, uint32_t minSequencesCountBufferOffsetAlignment_ = {}, uint32_t minSequencesIndexBufferOffsetAlignment_ = {}, uint32_t minIndirectCommandsBufferOffsetAlignment_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, maxGraphicsShaderGroupCount{ maxGraphicsShaderGroupCount_ }, maxIndirectSequenceCount{ maxIndirectSequenceCount_ }, maxIndirectCommandsTokenCount{ maxIndirectCommandsTokenCount_ }, maxIndirectCommandsStreamCount{ maxIndirectCommandsStreamCount_ }, maxIndirectCommandsTokenOffset{ maxIndirectCommandsTokenOffset_ }, maxIndirectCommandsStreamStride{ maxIndirectCommandsStreamStride_ }, minSequencesCountBufferOffsetAlignment{ minSequencesCountBufferOffsetAlignment_ }, minSequencesIndexBufferOffsetAlignment{ minSequencesIndexBufferOffsetAlignment_ }, minIndirectCommandsBufferOffsetAlignment{ minIndirectCommandsBufferOffsetAlignment_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( *reinterpret_cast<PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const *>( &rhs ) )
{}
PhysicalDeviceDeviceGeneratedCommandsPropertiesNV & operator=( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceDeviceGeneratedCommandsPropertiesNV & operator=( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV*>( this );
}
operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV*>( this );
}
operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV*>( this );
}
operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, maxGraphicsShaderGroupCount, maxIndirectSequenceCount, maxIndirectCommandsTokenCount, maxIndirectCommandsStreamCount, maxIndirectCommandsTokenOffset, maxIndirectCommandsStreamStride, minSequencesCountBufferOffsetAlignment, minSequencesIndexBufferOffsetAlignment, minIndirectCommandsBufferOffsetAlignment );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & ) const = default;
#else
bool operator==( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( maxGraphicsShaderGroupCount == rhs.maxGraphicsShaderGroupCount )
&& ( maxIndirectSequenceCount == rhs.maxIndirectSequenceCount )
&& ( maxIndirectCommandsTokenCount == rhs.maxIndirectCommandsTokenCount )
&& ( maxIndirectCommandsStreamCount == rhs.maxIndirectCommandsStreamCount )
&& ( maxIndirectCommandsTokenOffset == rhs.maxIndirectCommandsTokenOffset )
&& ( maxIndirectCommandsStreamStride == rhs.maxIndirectCommandsStreamStride )
&& ( minSequencesCountBufferOffsetAlignment == rhs.minSequencesCountBufferOffsetAlignment )
&& ( minSequencesIndexBufferOffsetAlignment == rhs.minSequencesIndexBufferOffsetAlignment )
&& ( minIndirectCommandsBufferOffsetAlignment == rhs.minIndirectCommandsBufferOffsetAlignment );
#endif
}
bool operator!=( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV;
void * pNext = {};
uint32_t maxGraphicsShaderGroupCount = {};
uint32_t maxIndirectSequenceCount = {};
uint32_t maxIndirectCommandsTokenCount = {};
uint32_t maxIndirectCommandsStreamCount = {};
uint32_t maxIndirectCommandsTokenOffset = {};
uint32_t maxIndirectCommandsStreamStride = {};
uint32_t minSequencesCountBufferOffsetAlignment = {};
uint32_t minSequencesIndexBufferOffsetAlignment = {};
uint32_t minIndirectCommandsBufferOffsetAlignment = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV>
{
using Type = PhysicalDeviceDeviceGeneratedCommandsPropertiesNV;
};
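// Usage sketch (illustrative): with the enhanced API, the same query can be written as a
// StructureChain, which wires up sType/pNext automatically; the alignment members read back
// here are the ones to honor when sizing indirect command buffers.
//   auto chain = physicalDevice.getProperties2<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2,
//                                              VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsPropertiesNV>();
//   auto const & dgcProps = chain.get<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsPropertiesNV>();
//   // e.g. align buffer offsets to dgcProps.minIndirectCommandsBufferOffsetAlignment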
// wrapper struct for struct VkPhysicalDeviceDeviceMemoryReportFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDeviceMemoryReportFeaturesEXT.html
struct PhysicalDeviceDeviceMemoryReportFeaturesEXT
{
using NativeType = VkPhysicalDeviceDeviceMemoryReportFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDeviceMemoryReportFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceMemoryReportFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 deviceMemoryReport_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, deviceMemoryReport{ deviceMemoryReport_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceMemoryReportFeaturesEXT( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceDeviceMemoryReportFeaturesEXT( VkPhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceDeviceMemoryReportFeaturesEXT( *reinterpret_cast<PhysicalDeviceDeviceMemoryReportFeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceDeviceMemoryReportFeaturesEXT & operator=( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceDeviceMemoryReportFeaturesEXT & operator=( VkPhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceMemoryReportFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceMemoryReportFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceMemoryReportFeaturesEXT & setDeviceMemoryReport( VULKAN_HPP_NAMESPACE::Bool32 deviceMemoryReport_ ) VULKAN_HPP_NOEXCEPT
{
deviceMemoryReport = deviceMemoryReport_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceDeviceMemoryReportFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceDeviceMemoryReportFeaturesEXT*>( this );
}
operator VkPhysicalDeviceDeviceMemoryReportFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceDeviceMemoryReportFeaturesEXT*>( this );
}
operator VkPhysicalDeviceDeviceMemoryReportFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceDeviceMemoryReportFeaturesEXT*>( this );
}
operator VkPhysicalDeviceDeviceMemoryReportFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceDeviceMemoryReportFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, deviceMemoryReport );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( deviceMemoryReport == rhs.deviceMemoryReport );
#endif
}
bool operator!=( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceMemoryReportFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 deviceMemoryReport = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceDeviceMemoryReportFeaturesEXT>
{
using Type = PhysicalDeviceDeviceMemoryReportFeaturesEXT;
};
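// Note (illustrative): in each of these wrappers, structureType records the sType value the
// struct must carry, and allowDuplicate tells StructureChain whether the struct may legally
// appear more than once in a pNext chain; both feed the library's compile-time chain validation.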
// wrapper struct for struct VkPhysicalDeviceDiagnosticsConfigFeaturesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDiagnosticsConfigFeaturesNV.html
struct PhysicalDeviceDiagnosticsConfigFeaturesNV
{
using NativeType = VkPhysicalDeviceDiagnosticsConfigFeaturesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceDiagnosticsConfigFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, diagnosticsConfig{ diagnosticsConfig_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceDiagnosticsConfigFeaturesNV( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceDiagnosticsConfigFeaturesNV( VkPhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceDiagnosticsConfigFeaturesNV( *reinterpret_cast<PhysicalDeviceDiagnosticsConfigFeaturesNV const *>( &rhs ) )
{}
PhysicalDeviceDiagnosticsConfigFeaturesNV & operator=( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceDiagnosticsConfigFeaturesNV & operator=( VkPhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDiagnosticsConfigFeaturesNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDiagnosticsConfigFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDiagnosticsConfigFeaturesNV & setDiagnosticsConfig( VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig_ ) VULKAN_HPP_NOEXCEPT
{
diagnosticsConfig = diagnosticsConfig_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceDiagnosticsConfigFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceDiagnosticsConfigFeaturesNV*>( this );
}
operator VkPhysicalDeviceDiagnosticsConfigFeaturesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceDiagnosticsConfigFeaturesNV*>( this );
}
operator VkPhysicalDeviceDiagnosticsConfigFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceDiagnosticsConfigFeaturesNV*>( this );
}
operator VkPhysicalDeviceDiagnosticsConfigFeaturesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceDiagnosticsConfigFeaturesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, diagnosticsConfig );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceDiagnosticsConfigFeaturesNV const & ) const = default;
#else
bool operator==( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( diagnosticsConfig == rhs.diagnosticsConfig );
#endif
}
bool operator!=( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV>
{
using Type = PhysicalDeviceDiagnosticsConfigFeaturesNV;
};
// wrapper struct for struct VkPhysicalDeviceDiscardRectanglePropertiesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDiscardRectanglePropertiesEXT.html
struct PhysicalDeviceDiscardRectanglePropertiesEXT
{
using NativeType = VkPhysicalDeviceDiscardRectanglePropertiesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceDiscardRectanglePropertiesEXT(uint32_t maxDiscardRectangles_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, maxDiscardRectangles{ maxDiscardRectangles_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceDiscardRectanglePropertiesEXT( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceDiscardRectanglePropertiesEXT( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceDiscardRectanglePropertiesEXT( *reinterpret_cast<PhysicalDeviceDiscardRectanglePropertiesEXT const *>( &rhs ) )
{}
PhysicalDeviceDiscardRectanglePropertiesEXT & operator=( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceDiscardRectanglePropertiesEXT & operator=( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceDiscardRectanglePropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceDiscardRectanglePropertiesEXT*>( this );
}
operator VkPhysicalDeviceDiscardRectanglePropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceDiscardRectanglePropertiesEXT*>( this );
}
operator VkPhysicalDeviceDiscardRectanglePropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceDiscardRectanglePropertiesEXT*>( this );
}
operator VkPhysicalDeviceDiscardRectanglePropertiesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceDiscardRectanglePropertiesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, maxDiscardRectangles );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceDiscardRectanglePropertiesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( maxDiscardRectangles == rhs.maxDiscardRectangles );
#endif
}
bool operator!=( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT;
void * pNext = {};
uint32_t maxDiscardRectangles = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT>
{
using Type = PhysicalDeviceDiscardRectanglePropertiesEXT;
};
#if defined( VK_ENABLE_BETA_EXTENSIONS )
// wrapper struct for struct VkPhysicalDeviceDisplacementMicromapFeaturesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDisplacementMicromapFeaturesNV.html
struct PhysicalDeviceDisplacementMicromapFeaturesNV
{
using NativeType = VkPhysicalDeviceDisplacementMicromapFeaturesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDisplacementMicromapFeaturesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceDisplacementMicromapFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 displacementMicromap_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, displacementMicromap{ displacementMicromap_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceDisplacementMicromapFeaturesNV( PhysicalDeviceDisplacementMicromapFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceDisplacementMicromapFeaturesNV( VkPhysicalDeviceDisplacementMicromapFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceDisplacementMicromapFeaturesNV( *reinterpret_cast<PhysicalDeviceDisplacementMicromapFeaturesNV const *>( &rhs ) )
{}
PhysicalDeviceDisplacementMicromapFeaturesNV & operator=( PhysicalDeviceDisplacementMicromapFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceDisplacementMicromapFeaturesNV & operator=( VkPhysicalDeviceDisplacementMicromapFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDisplacementMicromapFeaturesNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDisplacementMicromapFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDisplacementMicromapFeaturesNV & setDisplacementMicromap( VULKAN_HPP_NAMESPACE::Bool32 displacementMicromap_ ) VULKAN_HPP_NOEXCEPT
{
displacementMicromap = displacementMicromap_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceDisplacementMicromapFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceDisplacementMicromapFeaturesNV*>( this );
}
operator VkPhysicalDeviceDisplacementMicromapFeaturesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceDisplacementMicromapFeaturesNV*>( this );
}
operator VkPhysicalDeviceDisplacementMicromapFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceDisplacementMicromapFeaturesNV*>( this );
}
operator VkPhysicalDeviceDisplacementMicromapFeaturesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceDisplacementMicromapFeaturesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, displacementMicromap );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceDisplacementMicromapFeaturesNV const & ) const = default;
#else
bool operator==( PhysicalDeviceDisplacementMicromapFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( displacementMicromap == rhs.displacementMicromap );
#endif
}
bool operator!=( PhysicalDeviceDisplacementMicromapFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDisplacementMicromapFeaturesNV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 displacementMicromap = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceDisplacementMicromapFeaturesNV>
{
using Type = PhysicalDeviceDisplacementMicromapFeaturesNV;
};
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
// wrapper struct for struct VkPhysicalDeviceDisplacementMicromapPropertiesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDisplacementMicromapPropertiesNV.html
struct PhysicalDeviceDisplacementMicromapPropertiesNV
{
using NativeType = VkPhysicalDeviceDisplacementMicromapPropertiesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDisplacementMicromapPropertiesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceDisplacementMicromapPropertiesNV(uint32_t maxDisplacementMicromapSubdivisionLevel_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, maxDisplacementMicromapSubdivisionLevel{ maxDisplacementMicromapSubdivisionLevel_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceDisplacementMicromapPropertiesNV( PhysicalDeviceDisplacementMicromapPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceDisplacementMicromapPropertiesNV( VkPhysicalDeviceDisplacementMicromapPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceDisplacementMicromapPropertiesNV( *reinterpret_cast<PhysicalDeviceDisplacementMicromapPropertiesNV const *>( &rhs ) )
{}
PhysicalDeviceDisplacementMicromapPropertiesNV & operator=( PhysicalDeviceDisplacementMicromapPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceDisplacementMicromapPropertiesNV & operator=( VkPhysicalDeviceDisplacementMicromapPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDisplacementMicromapPropertiesNV const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceDisplacementMicromapPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceDisplacementMicromapPropertiesNV*>( this );
}
operator VkPhysicalDeviceDisplacementMicromapPropertiesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceDisplacementMicromapPropertiesNV*>( this );
}
operator VkPhysicalDeviceDisplacementMicromapPropertiesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceDisplacementMicromapPropertiesNV*>( this );
}
operator VkPhysicalDeviceDisplacementMicromapPropertiesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceDisplacementMicromapPropertiesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, maxDisplacementMicromapSubdivisionLevel );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceDisplacementMicromapPropertiesNV const & ) const = default;
#else
bool operator==( PhysicalDeviceDisplacementMicromapPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( maxDisplacementMicromapSubdivisionLevel == rhs.maxDisplacementMicromapSubdivisionLevel );
#endif
}
bool operator!=( PhysicalDeviceDisplacementMicromapPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDisplacementMicromapPropertiesNV;
void * pNext = {};
uint32_t maxDisplacementMicromapSubdivisionLevel = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceDisplacementMicromapPropertiesNV>
{
using Type = PhysicalDeviceDisplacementMicromapPropertiesNV;
};
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
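// Note (illustrative): the two beta structs above are only visible when the translation unit
// defines VK_ENABLE_BETA_EXTENSIONS before including the Vulkan headers, e.g.
//   #define VK_ENABLE_BETA_EXTENSIONS
//   #include <vulkan/vulkan.hpp>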
// wrapper struct for struct VkPhysicalDeviceDriverProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDriverProperties.html
struct PhysicalDeviceDriverProperties
{
using NativeType = VkPhysicalDeviceDriverProperties;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDriverProperties;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDriverProperties(VULKAN_HPP_NAMESPACE::DriverId driverID_ = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary, std::array<char,VK_MAX_DRIVER_NAME_SIZE> const & driverName_ = {}, std::array<char,VK_MAX_DRIVER_INFO_SIZE> const & driverInfo_ = {}, VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, driverID{ driverID_ }, driverName{ driverName_ }, driverInfo{ driverInfo_ }, conformanceVersion{ conformanceVersion_ }
{}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDriverProperties( PhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceDriverProperties( VkPhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceDriverProperties( *reinterpret_cast<PhysicalDeviceDriverProperties const *>( &rhs ) )
{}
PhysicalDeviceDriverProperties & operator=( PhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceDriverProperties & operator=( VkPhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceDriverProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceDriverProperties*>( this );
}
operator VkPhysicalDeviceDriverProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceDriverProperties*>( this );
}
operator VkPhysicalDeviceDriverProperties const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceDriverProperties*>( this );
}
operator VkPhysicalDeviceDriverProperties *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceDriverProperties*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DriverId const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_NAME_SIZE> const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_INFO_SIZE> const &, VULKAN_HPP_NAMESPACE::ConformanceVersion const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, driverID, driverName, driverInfo, conformanceVersion );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
std::strong_ordering operator<=>( PhysicalDeviceDriverProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
if ( auto cmp = driverID <=> rhs.driverID; cmp != 0 ) return cmp;
if ( auto cmp = strcmp( driverName, rhs.driverName ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
if ( auto cmp = strcmp( driverInfo, rhs.driverInfo ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
if ( auto cmp = conformanceVersion <=> rhs.conformanceVersion; cmp != 0 ) return cmp;
return std::strong_ordering::equivalent;
}
#endif
bool operator==( PhysicalDeviceDriverProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( driverID == rhs.driverID )
&& ( strcmp( driverName, rhs.driverName ) == 0 )
&& ( strcmp( driverInfo, rhs.driverInfo ) == 0 )
&& ( conformanceVersion == rhs.conformanceVersion );
}
bool operator!=( PhysicalDeviceDriverProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDriverProperties;
void * pNext = {};
VULKAN_HPP_NAMESPACE::DriverId driverID = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary;
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_NAME_SIZE> driverName = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_INFO_SIZE> driverInfo = {};
VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceDriverProperties>
{
using Type = PhysicalDeviceDriverProperties;
};
using PhysicalDeviceDriverPropertiesKHR = PhysicalDeviceDriverProperties;
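// Usage sketch (illustrative): driverName and driverInfo are fixed-size char arrays wrapped in
// ArrayWrapper1D (which derives from std::array), so after the usual getProperties2 query they
// can be consumed as C strings; <string> is assumed to be available.
//   VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties driverProps;
//   VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 props2;
//   props2.pNext = &driverProps;
//   physicalDevice.getProperties2( &props2 );
//   std::string name( driverProps.driverName.data() );   // e.g. the driver's display name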
// wrapper struct for struct VkPhysicalDeviceDrmPropertiesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDrmPropertiesEXT.html
struct PhysicalDeviceDrmPropertiesEXT
{
using NativeType = VkPhysicalDeviceDrmPropertiesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDrmPropertiesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceDrmPropertiesEXT(VULKAN_HPP_NAMESPACE::Bool32 hasPrimary_ = {}, VULKAN_HPP_NAMESPACE::Bool32 hasRender_ = {}, int64_t primaryMajor_ = {}, int64_t primaryMinor_ = {}, int64_t renderMajor_ = {}, int64_t renderMinor_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, hasPrimary{ hasPrimary_ }, hasRender{ hasRender_ }, primaryMajor{ primaryMajor_ }, primaryMinor{ primaryMinor_ }, renderMajor{ renderMajor_ }, renderMinor{ renderMinor_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceDrmPropertiesEXT( PhysicalDeviceDrmPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceDrmPropertiesEXT( VkPhysicalDeviceDrmPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceDrmPropertiesEXT( *reinterpret_cast<PhysicalDeviceDrmPropertiesEXT const *>( &rhs ) )
{}
PhysicalDeviceDrmPropertiesEXT & operator=( PhysicalDeviceDrmPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceDrmPropertiesEXT & operator=( VkPhysicalDeviceDrmPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDrmPropertiesEXT const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceDrmPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceDrmPropertiesEXT*>( this );
}
operator VkPhysicalDeviceDrmPropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceDrmPropertiesEXT*>( this );
}
operator VkPhysicalDeviceDrmPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceDrmPropertiesEXT*>( this );
}
operator VkPhysicalDeviceDrmPropertiesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceDrmPropertiesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, int64_t const &, int64_t const &, int64_t const &, int64_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, hasPrimary, hasRender, primaryMajor, primaryMinor, renderMajor, renderMinor );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceDrmPropertiesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceDrmPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( hasPrimary == rhs.hasPrimary )
&& ( hasRender == rhs.hasRender )
&& ( primaryMajor == rhs.primaryMajor )
&& ( primaryMinor == rhs.primaryMinor )
&& ( renderMajor == rhs.renderMajor )
&& ( renderMinor == rhs.renderMinor );
#endif
}
bool operator!=( PhysicalDeviceDrmPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDrmPropertiesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 hasPrimary = {};
VULKAN_HPP_NAMESPACE::Bool32 hasRender = {};
int64_t primaryMajor = {};
int64_t primaryMinor = {};
int64_t renderMajor = {};
int64_t renderMinor = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceDrmPropertiesEXT>
{
using Type = PhysicalDeviceDrmPropertiesEXT;
};
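// Usage sketch (illustrative): hasPrimary/hasRender report whether the primary*/render* pairs
// are valid; on Linux they are DRM device numbers, so a render node can (hypothetically) be
// matched like this, assuming drmProps was filled via getProperties2 and <sys/sysmacros.h>:
//   if ( drmProps.hasRender )
//   {
//     dev_t renderDev = makedev( drmProps.renderMajor, drmProps.renderMinor );
//     // compare renderDev against the st_rdev of /dev/dri/renderD* nodes via stat()
//   }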
// wrapper struct for struct VkPhysicalDeviceDynamicRenderingFeatures, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDynamicRenderingFeatures.html
struct PhysicalDeviceDynamicRenderingFeatures
{
using NativeType = VkPhysicalDeviceDynamicRenderingFeatures;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDynamicRenderingFeatures;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceDynamicRenderingFeatures(VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, dynamicRendering{ dynamicRendering_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceDynamicRenderingFeatures( PhysicalDeviceDynamicRenderingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceDynamicRenderingFeatures( VkPhysicalDeviceDynamicRenderingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceDynamicRenderingFeatures( *reinterpret_cast<PhysicalDeviceDynamicRenderingFeatures const *>( &rhs ) )
{}
PhysicalDeviceDynamicRenderingFeatures & operator=( PhysicalDeviceDynamicRenderingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceDynamicRenderingFeatures & operator=( VkPhysicalDeviceDynamicRenderingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingFeatures const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDynamicRenderingFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDynamicRenderingFeatures & setDynamicRendering( VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering_ ) VULKAN_HPP_NOEXCEPT
{
dynamicRendering = dynamicRendering_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceDynamicRenderingFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceDynamicRenderingFeatures*>( this );
}
operator VkPhysicalDeviceDynamicRenderingFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceDynamicRenderingFeatures*>( this );
}
operator VkPhysicalDeviceDynamicRenderingFeatures const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceDynamicRenderingFeatures*>( this );
}
operator VkPhysicalDeviceDynamicRenderingFeatures *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceDynamicRenderingFeatures*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, dynamicRendering );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceDynamicRenderingFeatures const & ) const = default;
#else
bool operator==( PhysicalDeviceDynamicRenderingFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( dynamicRendering == rhs.dynamicRendering );
#endif
}
bool operator!=( PhysicalDeviceDynamicRenderingFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDynamicRenderingFeatures;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceDynamicRenderingFeatures>
{
using Type = PhysicalDeviceDynamicRenderingFeatures;
};
using PhysicalDeviceDynamicRenderingFeaturesKHR = PhysicalDeviceDynamicRenderingFeatures;
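// Usage sketch (illustrative): querying only reports support; to actually enable a feature,
// chain the struct (with the member set to VK_TRUE) into DeviceCreateInfo at device creation:
//   VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingFeatures dynamicRenderingFeatures{ VK_TRUE };
//   VULKAN_HPP_NAMESPACE::DeviceCreateInfo createInfo;
//   createInfo.pNext = &dynamicRenderingFeatures;
//   // ... set queue create infos etc., then create the device from this createInfo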
// wrapper struct for struct VkPhysicalDeviceDynamicRenderingLocalReadFeatures, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDynamicRenderingLocalReadFeatures.html
struct PhysicalDeviceDynamicRenderingLocalReadFeatures
{
using NativeType = VkPhysicalDeviceDynamicRenderingLocalReadFeatures;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDynamicRenderingLocalReadFeatures;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceDynamicRenderingLocalReadFeatures(VULKAN_HPP_NAMESPACE::Bool32 dynamicRenderingLocalRead_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, dynamicRenderingLocalRead{ dynamicRenderingLocalRead_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceDynamicRenderingLocalReadFeatures( PhysicalDeviceDynamicRenderingLocalReadFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceDynamicRenderingLocalReadFeatures( VkPhysicalDeviceDynamicRenderingLocalReadFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceDynamicRenderingLocalReadFeatures( *reinterpret_cast<PhysicalDeviceDynamicRenderingLocalReadFeatures const *>( &rhs ) )
{}
PhysicalDeviceDynamicRenderingLocalReadFeatures & operator=( PhysicalDeviceDynamicRenderingLocalReadFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceDynamicRenderingLocalReadFeatures & operator=( VkPhysicalDeviceDynamicRenderingLocalReadFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingLocalReadFeatures const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDynamicRenderingLocalReadFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDynamicRenderingLocalReadFeatures & setDynamicRenderingLocalRead( VULKAN_HPP_NAMESPACE::Bool32 dynamicRenderingLocalRead_ ) VULKAN_HPP_NOEXCEPT
{
dynamicRenderingLocalRead = dynamicRenderingLocalRead_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceDynamicRenderingLocalReadFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceDynamicRenderingLocalReadFeatures*>( this );
}
operator VkPhysicalDeviceDynamicRenderingLocalReadFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceDynamicRenderingLocalReadFeatures*>( this );
}
operator VkPhysicalDeviceDynamicRenderingLocalReadFeatures const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceDynamicRenderingLocalReadFeatures*>( this );
}
operator VkPhysicalDeviceDynamicRenderingLocalReadFeatures *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceDynamicRenderingLocalReadFeatures*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, dynamicRenderingLocalRead );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceDynamicRenderingLocalReadFeatures const & ) const = default;
#else
bool operator==( PhysicalDeviceDynamicRenderingLocalReadFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( dynamicRenderingLocalRead == rhs.dynamicRenderingLocalRead );
#endif
}
bool operator!=( PhysicalDeviceDynamicRenderingLocalReadFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDynamicRenderingLocalReadFeatures;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 dynamicRenderingLocalRead = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceDynamicRenderingLocalReadFeatures>
{
using Type = PhysicalDeviceDynamicRenderingLocalReadFeatures;
};
using PhysicalDeviceDynamicRenderingLocalReadFeaturesKHR = PhysicalDeviceDynamicRenderingLocalReadFeatures;
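// Usage sketch (illustrative): the CppType specializations map a StructureType enumerant back
// to its wrapper type at compile time, which can be checked directly:
//   static_assert( std::is_same<VULKAN_HPP_NAMESPACE::CppType<VULKAN_HPP_NAMESPACE::StructureType,
//                                 VULKAN_HPP_NAMESPACE::StructureType::ePhysicalDeviceDynamicRenderingLocalReadFeatures>::Type,
//                               VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingLocalReadFeatures>::value,
//                  "CppType maps an sType back to its wrapper" );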
// wrapper struct for struct VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT.html
struct PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT
{
using NativeType = VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 dynamicRenderingUnusedAttachments_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, dynamicRenderingUnusedAttachments{ dynamicRenderingUnusedAttachments_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT( PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT( VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT( *reinterpret_cast<PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT & operator=( PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT & operator=( VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT & setDynamicRenderingUnusedAttachments( VULKAN_HPP_NAMESPACE::Bool32 dynamicRenderingUnusedAttachments_ ) VULKAN_HPP_NOEXCEPT
{
dynamicRenderingUnusedAttachments = dynamicRenderingUnusedAttachments_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT*>( this );
}
operator VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT*>( this );
}
operator VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT*>( this );
}
operator VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, dynamicRenderingUnusedAttachments );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( dynamicRenderingUnusedAttachments == rhs.dynamicRenderingUnusedAttachments );
#endif
}
bool operator!=( PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 dynamicRenderingUnusedAttachments = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT>
{
using Type = PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT;
};
// wrapper struct for struct VkPhysicalDeviceExclusiveScissorFeaturesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExclusiveScissorFeaturesNV.html
struct PhysicalDeviceExclusiveScissorFeaturesNV
{
using NativeType = VkPhysicalDeviceExclusiveScissorFeaturesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceExclusiveScissorFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, exclusiveScissor{ exclusiveScissor_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceExclusiveScissorFeaturesNV( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceExclusiveScissorFeaturesNV( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceExclusiveScissorFeaturesNV( *reinterpret_cast<PhysicalDeviceExclusiveScissorFeaturesNV const *>( &rhs ) )
{}
PhysicalDeviceExclusiveScissorFeaturesNV & operator=( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceExclusiveScissorFeaturesNV & operator=( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExclusiveScissorFeaturesNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExclusiveScissorFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExclusiveScissorFeaturesNV & setExclusiveScissor( VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor_ ) VULKAN_HPP_NOEXCEPT
{
exclusiveScissor = exclusiveScissor_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceExclusiveScissorFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceExclusiveScissorFeaturesNV*>( this );
}
operator VkPhysicalDeviceExclusiveScissorFeaturesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceExclusiveScissorFeaturesNV*>( this );
}
operator VkPhysicalDeviceExclusiveScissorFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceExclusiveScissorFeaturesNV*>( this );
}
operator VkPhysicalDeviceExclusiveScissorFeaturesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceExclusiveScissorFeaturesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, exclusiveScissor );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceExclusiveScissorFeaturesNV const & ) const = default;
#else
bool operator==( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( exclusiveScissor == rhs.exclusiveScissor );
#endif
}
bool operator!=( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV>
{
using Type = PhysicalDeviceExclusiveScissorFeaturesNV;
};
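// Usage sketch (illustrative, assuming the default `vk` namespace alias and a
// valid `physicalDevice`): the same feature query can be expressed with a
// vk::StructureChain, which links the pNext pointers automatically:
//
//   auto chain     = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
//                                                vk::PhysicalDeviceExclusiveScissorFeaturesNV>();
//   bool supported = chain.get<vk::PhysicalDeviceExclusiveScissorFeaturesNV>().exclusiveScissor;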
// wrapper struct for struct VkPhysicalDeviceExtendedDynamicState2FeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExtendedDynamicState2FeaturesEXT.html
struct PhysicalDeviceExtendedDynamicState2FeaturesEXT
{
using NativeType = VkPhysicalDeviceExtendedDynamicState2FeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExtendedDynamicState2FeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicState2FeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2_ = {}, VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2LogicOp_ = {}, VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2PatchControlPoints_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, extendedDynamicState2{ extendedDynamicState2_ }, extendedDynamicState2LogicOp{ extendedDynamicState2LogicOp_ }, extendedDynamicState2PatchControlPoints{ extendedDynamicState2PatchControlPoints_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicState2FeaturesEXT( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceExtendedDynamicState2FeaturesEXT( VkPhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceExtendedDynamicState2FeaturesEXT( *reinterpret_cast<PhysicalDeviceExtendedDynamicState2FeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceExtendedDynamicState2FeaturesEXT & operator=( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceExtendedDynamicState2FeaturesEXT & operator=( VkPhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState2FeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState2FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState2FeaturesEXT & setExtendedDynamicState2( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2_ ) VULKAN_HPP_NOEXCEPT
{
extendedDynamicState2 = extendedDynamicState2_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState2FeaturesEXT & setExtendedDynamicState2LogicOp( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2LogicOp_ ) VULKAN_HPP_NOEXCEPT
{
extendedDynamicState2LogicOp = extendedDynamicState2LogicOp_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState2FeaturesEXT & setExtendedDynamicState2PatchControlPoints( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2PatchControlPoints_ ) VULKAN_HPP_NOEXCEPT
{
extendedDynamicState2PatchControlPoints = extendedDynamicState2PatchControlPoints_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceExtendedDynamicState2FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceExtendedDynamicState2FeaturesEXT*>( this );
}
operator VkPhysicalDeviceExtendedDynamicState2FeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceExtendedDynamicState2FeaturesEXT*>( this );
}
operator VkPhysicalDeviceExtendedDynamicState2FeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceExtendedDynamicState2FeaturesEXT*>( this );
}
operator VkPhysicalDeviceExtendedDynamicState2FeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceExtendedDynamicState2FeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, extendedDynamicState2, extendedDynamicState2LogicOp, extendedDynamicState2PatchControlPoints );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( extendedDynamicState2 == rhs.extendedDynamicState2 )
&& ( extendedDynamicState2LogicOp == rhs.extendedDynamicState2LogicOp )
&& ( extendedDynamicState2PatchControlPoints == rhs.extendedDynamicState2PatchControlPoints );
#endif
}
bool operator!=( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExtendedDynamicState2FeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2 = {};
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2LogicOp = {};
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2PatchControlPoints = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceExtendedDynamicState2FeaturesEXT>
{
using Type = PhysicalDeviceExtendedDynamicState2FeaturesEXT;
};
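// Usage sketch (illustrative, assuming the default `vk` namespace alias): since
// this struct extends VkDeviceCreateInfo, the features are enabled at device
// creation by chaining it into the create info. `queueCreateInfo` and
// `physicalDevice` are assumptions of the example.
//
//   vk::PhysicalDeviceExtendedDynamicState2FeaturesEXT eds2Features;
//   eds2Features.setExtendedDynamicState2( VK_TRUE ).setExtendedDynamicState2LogicOp( VK_TRUE );
//   vk::DeviceCreateInfo deviceCreateInfo;
//   deviceCreateInfo.setQueueCreateInfos( queueCreateInfo ).setPNext( &eds2Features );
//   vk::Device device = physicalDevice.createDevice( deviceCreateInfo );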
// wrapper struct for struct VkPhysicalDeviceExtendedDynamicState3FeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExtendedDynamicState3FeaturesEXT.html
struct PhysicalDeviceExtendedDynamicState3FeaturesEXT
{
using NativeType = VkPhysicalDeviceExtendedDynamicState3FeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExtendedDynamicState3FeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicState3FeaturesEXT(
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3TessellationDomainOrigin_ = {},
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClampEnable_ = {},
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3PolygonMode_ = {},
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RasterizationSamples_ = {},
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3SampleMask_ = {},
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3AlphaToCoverageEnable_ = {},
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3AlphaToOneEnable_ = {},
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LogicOpEnable_ = {},
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendEnable_ = {},
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendEquation_ = {},
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorWriteMask_ = {},
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RasterizationStream_ = {},
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ConservativeRasterizationMode_ = {},
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ExtraPrimitiveOverestimationSize_ = {},
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClipEnable_ = {},
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3SampleLocationsEnable_ = {},
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendAdvanced_ = {},
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ProvokingVertexMode_ = {},
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LineRasterizationMode_ = {},
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LineStippleEnable_ = {},
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClipNegativeOneToOne_ = {},
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ViewportWScalingEnable_ = {},
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ViewportSwizzle_ = {},
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageToColorEnable_ = {},
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageToColorLocation_ = {},
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationMode_ = {},
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationTableEnable_ = {},
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationTable_ = {},
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageReductionMode_ = {},
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RepresentativeFragmentTestEnable_ = {},
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ShadingRateImageEnable_ = {},
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }
, extendedDynamicState3TessellationDomainOrigin{ extendedDynamicState3TessellationDomainOrigin_ }
, extendedDynamicState3DepthClampEnable{ extendedDynamicState3DepthClampEnable_ }
, extendedDynamicState3PolygonMode{ extendedDynamicState3PolygonMode_ }
, extendedDynamicState3RasterizationSamples{ extendedDynamicState3RasterizationSamples_ }
, extendedDynamicState3SampleMask{ extendedDynamicState3SampleMask_ }
, extendedDynamicState3AlphaToCoverageEnable{ extendedDynamicState3AlphaToCoverageEnable_ }
, extendedDynamicState3AlphaToOneEnable{ extendedDynamicState3AlphaToOneEnable_ }
, extendedDynamicState3LogicOpEnable{ extendedDynamicState3LogicOpEnable_ }
, extendedDynamicState3ColorBlendEnable{ extendedDynamicState3ColorBlendEnable_ }
, extendedDynamicState3ColorBlendEquation{ extendedDynamicState3ColorBlendEquation_ }
, extendedDynamicState3ColorWriteMask{ extendedDynamicState3ColorWriteMask_ }
, extendedDynamicState3RasterizationStream{ extendedDynamicState3RasterizationStream_ }
, extendedDynamicState3ConservativeRasterizationMode{ extendedDynamicState3ConservativeRasterizationMode_ }
, extendedDynamicState3ExtraPrimitiveOverestimationSize{ extendedDynamicState3ExtraPrimitiveOverestimationSize_ }
, extendedDynamicState3DepthClipEnable{ extendedDynamicState3DepthClipEnable_ }
, extendedDynamicState3SampleLocationsEnable{ extendedDynamicState3SampleLocationsEnable_ }
, extendedDynamicState3ColorBlendAdvanced{ extendedDynamicState3ColorBlendAdvanced_ }
, extendedDynamicState3ProvokingVertexMode{ extendedDynamicState3ProvokingVertexMode_ }
, extendedDynamicState3LineRasterizationMode{ extendedDynamicState3LineRasterizationMode_ }
, extendedDynamicState3LineStippleEnable{ extendedDynamicState3LineStippleEnable_ }
, extendedDynamicState3DepthClipNegativeOneToOne{ extendedDynamicState3DepthClipNegativeOneToOne_ }
, extendedDynamicState3ViewportWScalingEnable{ extendedDynamicState3ViewportWScalingEnable_ }
, extendedDynamicState3ViewportSwizzle{ extendedDynamicState3ViewportSwizzle_ }
, extendedDynamicState3CoverageToColorEnable{ extendedDynamicState3CoverageToColorEnable_ }
, extendedDynamicState3CoverageToColorLocation{ extendedDynamicState3CoverageToColorLocation_ }
, extendedDynamicState3CoverageModulationMode{ extendedDynamicState3CoverageModulationMode_ }
, extendedDynamicState3CoverageModulationTableEnable{ extendedDynamicState3CoverageModulationTableEnable_ }
, extendedDynamicState3CoverageModulationTable{ extendedDynamicState3CoverageModulationTable_ }
, extendedDynamicState3CoverageReductionMode{ extendedDynamicState3CoverageReductionMode_ }
, extendedDynamicState3RepresentativeFragmentTestEnable{ extendedDynamicState3RepresentativeFragmentTestEnable_ }
, extendedDynamicState3ShadingRateImageEnable{ extendedDynamicState3ShadingRateImageEnable_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicState3FeaturesEXT( PhysicalDeviceExtendedDynamicState3FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceExtendedDynamicState3FeaturesEXT( VkPhysicalDeviceExtendedDynamicState3FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceExtendedDynamicState3FeaturesEXT( *reinterpret_cast<PhysicalDeviceExtendedDynamicState3FeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceExtendedDynamicState3FeaturesEXT & operator=( PhysicalDeviceExtendedDynamicState3FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceExtendedDynamicState3FeaturesEXT & operator=( VkPhysicalDeviceExtendedDynamicState3FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3FeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3TessellationDomainOrigin( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3TessellationDomainOrigin_ ) VULKAN_HPP_NOEXCEPT
{
extendedDynamicState3TessellationDomainOrigin = extendedDynamicState3TessellationDomainOrigin_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3DepthClampEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClampEnable_ ) VULKAN_HPP_NOEXCEPT
{
extendedDynamicState3DepthClampEnable = extendedDynamicState3DepthClampEnable_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3PolygonMode( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3PolygonMode_ ) VULKAN_HPP_NOEXCEPT
{
extendedDynamicState3PolygonMode = extendedDynamicState3PolygonMode_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3RasterizationSamples( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RasterizationSamples_ ) VULKAN_HPP_NOEXCEPT
{
extendedDynamicState3RasterizationSamples = extendedDynamicState3RasterizationSamples_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3SampleMask( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3SampleMask_ ) VULKAN_HPP_NOEXCEPT
{
extendedDynamicState3SampleMask = extendedDynamicState3SampleMask_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3AlphaToCoverageEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3AlphaToCoverageEnable_ ) VULKAN_HPP_NOEXCEPT
{
extendedDynamicState3AlphaToCoverageEnable = extendedDynamicState3AlphaToCoverageEnable_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3AlphaToOneEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3AlphaToOneEnable_ ) VULKAN_HPP_NOEXCEPT
{
extendedDynamicState3AlphaToOneEnable = extendedDynamicState3AlphaToOneEnable_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3LogicOpEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LogicOpEnable_ ) VULKAN_HPP_NOEXCEPT
{
extendedDynamicState3LogicOpEnable = extendedDynamicState3LogicOpEnable_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3ColorBlendEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendEnable_ ) VULKAN_HPP_NOEXCEPT
{
extendedDynamicState3ColorBlendEnable = extendedDynamicState3ColorBlendEnable_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3ColorBlendEquation( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendEquation_ ) VULKAN_HPP_NOEXCEPT
{
extendedDynamicState3ColorBlendEquation = extendedDynamicState3ColorBlendEquation_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3ColorWriteMask( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorWriteMask_ ) VULKAN_HPP_NOEXCEPT
{
extendedDynamicState3ColorWriteMask = extendedDynamicState3ColorWriteMask_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3RasterizationStream( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RasterizationStream_ ) VULKAN_HPP_NOEXCEPT
{
extendedDynamicState3RasterizationStream = extendedDynamicState3RasterizationStream_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3ConservativeRasterizationMode( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ConservativeRasterizationMode_ ) VULKAN_HPP_NOEXCEPT
{
extendedDynamicState3ConservativeRasterizationMode = extendedDynamicState3ConservativeRasterizationMode_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3ExtraPrimitiveOverestimationSize( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ExtraPrimitiveOverestimationSize_ ) VULKAN_HPP_NOEXCEPT
{
extendedDynamicState3ExtraPrimitiveOverestimationSize = extendedDynamicState3ExtraPrimitiveOverestimationSize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3DepthClipEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClipEnable_ ) VULKAN_HPP_NOEXCEPT
{
extendedDynamicState3DepthClipEnable = extendedDynamicState3DepthClipEnable_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3SampleLocationsEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3SampleLocationsEnable_ ) VULKAN_HPP_NOEXCEPT
{
extendedDynamicState3SampleLocationsEnable = extendedDynamicState3SampleLocationsEnable_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3ColorBlendAdvanced( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendAdvanced_ ) VULKAN_HPP_NOEXCEPT
{
extendedDynamicState3ColorBlendAdvanced = extendedDynamicState3ColorBlendAdvanced_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3ProvokingVertexMode( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ProvokingVertexMode_ ) VULKAN_HPP_NOEXCEPT
{
extendedDynamicState3ProvokingVertexMode = extendedDynamicState3ProvokingVertexMode_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3LineRasterizationMode( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LineRasterizationMode_ ) VULKAN_HPP_NOEXCEPT
{
extendedDynamicState3LineRasterizationMode = extendedDynamicState3LineRasterizationMode_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3LineStippleEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LineStippleEnable_ ) VULKAN_HPP_NOEXCEPT
{
extendedDynamicState3LineStippleEnable = extendedDynamicState3LineStippleEnable_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3DepthClipNegativeOneToOne( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClipNegativeOneToOne_ ) VULKAN_HPP_NOEXCEPT
{
extendedDynamicState3DepthClipNegativeOneToOne = extendedDynamicState3DepthClipNegativeOneToOne_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3ViewportWScalingEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ViewportWScalingEnable_ ) VULKAN_HPP_NOEXCEPT
{
extendedDynamicState3ViewportWScalingEnable = extendedDynamicState3ViewportWScalingEnable_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3ViewportSwizzle( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ViewportSwizzle_ ) VULKAN_HPP_NOEXCEPT
{
extendedDynamicState3ViewportSwizzle = extendedDynamicState3ViewportSwizzle_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3CoverageToColorEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageToColorEnable_ ) VULKAN_HPP_NOEXCEPT
{
extendedDynamicState3CoverageToColorEnable = extendedDynamicState3CoverageToColorEnable_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3CoverageToColorLocation( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageToColorLocation_ ) VULKAN_HPP_NOEXCEPT
{
extendedDynamicState3CoverageToColorLocation = extendedDynamicState3CoverageToColorLocation_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3CoverageModulationMode( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationMode_ ) VULKAN_HPP_NOEXCEPT
{
extendedDynamicState3CoverageModulationMode = extendedDynamicState3CoverageModulationMode_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3CoverageModulationTableEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationTableEnable_ ) VULKAN_HPP_NOEXCEPT
{
extendedDynamicState3CoverageModulationTableEnable = extendedDynamicState3CoverageModulationTableEnable_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3CoverageModulationTable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationTable_ ) VULKAN_HPP_NOEXCEPT
{
extendedDynamicState3CoverageModulationTable = extendedDynamicState3CoverageModulationTable_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3CoverageReductionMode( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageReductionMode_ ) VULKAN_HPP_NOEXCEPT
{
extendedDynamicState3CoverageReductionMode = extendedDynamicState3CoverageReductionMode_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3RepresentativeFragmentTestEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RepresentativeFragmentTestEnable_ ) VULKAN_HPP_NOEXCEPT
{
extendedDynamicState3RepresentativeFragmentTestEnable = extendedDynamicState3RepresentativeFragmentTestEnable_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3ShadingRateImageEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ShadingRateImageEnable_ ) VULKAN_HPP_NOEXCEPT
{
extendedDynamicState3ShadingRateImageEnable = extendedDynamicState3ShadingRateImageEnable_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceExtendedDynamicState3FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceExtendedDynamicState3FeaturesEXT*>( this );
}
operator VkPhysicalDeviceExtendedDynamicState3FeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceExtendedDynamicState3FeaturesEXT*>( this );
}
operator VkPhysicalDeviceExtendedDynamicState3FeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceExtendedDynamicState3FeaturesEXT*>( this );
}
operator VkPhysicalDeviceExtendedDynamicState3FeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceExtendedDynamicState3FeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
void * const &,
VULKAN_HPP_NAMESPACE::Bool32 const &,
VULKAN_HPP_NAMESPACE::Bool32 const &,
VULKAN_HPP_NAMESPACE::Bool32 const &,
VULKAN_HPP_NAMESPACE::Bool32 const &,
VULKAN_HPP_NAMESPACE::Bool32 const &,
VULKAN_HPP_NAMESPACE::Bool32 const &,
VULKAN_HPP_NAMESPACE::Bool32 const &,
VULKAN_HPP_NAMESPACE::Bool32 const &,
VULKAN_HPP_NAMESPACE::Bool32 const &,
VULKAN_HPP_NAMESPACE::Bool32 const &,
VULKAN_HPP_NAMESPACE::Bool32 const &,
VULKAN_HPP_NAMESPACE::Bool32 const &,
VULKAN_HPP_NAMESPACE::Bool32 const &,
VULKAN_HPP_NAMESPACE::Bool32 const &,
VULKAN_HPP_NAMESPACE::Bool32 const &,
VULKAN_HPP_NAMESPACE::Bool32 const &,
VULKAN_HPP_NAMESPACE::Bool32 const &,
VULKAN_HPP_NAMESPACE::Bool32 const &,
VULKAN_HPP_NAMESPACE::Bool32 const &,
VULKAN_HPP_NAMESPACE::Bool32 const &,
VULKAN_HPP_NAMESPACE::Bool32 const &,
VULKAN_HPP_NAMESPACE::Bool32 const &,
VULKAN_HPP_NAMESPACE::Bool32 const &,
VULKAN_HPP_NAMESPACE::Bool32 const &,
VULKAN_HPP_NAMESPACE::Bool32 const &,
VULKAN_HPP_NAMESPACE::Bool32 const &,
VULKAN_HPP_NAMESPACE::Bool32 const &,
VULKAN_HPP_NAMESPACE::Bool32 const &,
VULKAN_HPP_NAMESPACE::Bool32 const &,
VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType,
pNext,
extendedDynamicState3TessellationDomainOrigin,
extendedDynamicState3DepthClampEnable,
extendedDynamicState3PolygonMode,
extendedDynamicState3RasterizationSamples,
extendedDynamicState3SampleMask,
extendedDynamicState3AlphaToCoverageEnable,
extendedDynamicState3AlphaToOneEnable,
extendedDynamicState3LogicOpEnable,
extendedDynamicState3ColorBlendEnable,
extendedDynamicState3ColorBlendEquation,
extendedDynamicState3ColorWriteMask,
extendedDynamicState3RasterizationStream,
extendedDynamicState3ConservativeRasterizationMode,
extendedDynamicState3ExtraPrimitiveOverestimationSize,
extendedDynamicState3DepthClipEnable,
extendedDynamicState3SampleLocationsEnable,
extendedDynamicState3ColorBlendAdvanced,
extendedDynamicState3ProvokingVertexMode,
extendedDynamicState3LineRasterizationMode,
extendedDynamicState3LineStippleEnable,
extendedDynamicState3DepthClipNegativeOneToOne,
extendedDynamicState3ViewportWScalingEnable,
extendedDynamicState3ViewportSwizzle,
extendedDynamicState3CoverageToColorEnable,
extendedDynamicState3CoverageToColorLocation,
extendedDynamicState3CoverageModulationMode,
extendedDynamicState3CoverageModulationTableEnable,
extendedDynamicState3CoverageModulationTable,
extendedDynamicState3CoverageReductionMode,
extendedDynamicState3RepresentativeFragmentTestEnable,
extendedDynamicState3ShadingRateImageEnable );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceExtendedDynamicState3FeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceExtendedDynamicState3FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( extendedDynamicState3TessellationDomainOrigin == rhs.extendedDynamicState3TessellationDomainOrigin )
&& ( extendedDynamicState3DepthClampEnable == rhs.extendedDynamicState3DepthClampEnable )
&& ( extendedDynamicState3PolygonMode == rhs.extendedDynamicState3PolygonMode )
&& ( extendedDynamicState3RasterizationSamples == rhs.extendedDynamicState3RasterizationSamples )
&& ( extendedDynamicState3SampleMask == rhs.extendedDynamicState3SampleMask )
&& ( extendedDynamicState3AlphaToCoverageEnable == rhs.extendedDynamicState3AlphaToCoverageEnable )
&& ( extendedDynamicState3AlphaToOneEnable == rhs.extendedDynamicState3AlphaToOneEnable )
&& ( extendedDynamicState3LogicOpEnable == rhs.extendedDynamicState3LogicOpEnable )
&& ( extendedDynamicState3ColorBlendEnable == rhs.extendedDynamicState3ColorBlendEnable )
&& ( extendedDynamicState3ColorBlendEquation == rhs.extendedDynamicState3ColorBlendEquation )
&& ( extendedDynamicState3ColorWriteMask == rhs.extendedDynamicState3ColorWriteMask )
&& ( extendedDynamicState3RasterizationStream == rhs.extendedDynamicState3RasterizationStream )
&& ( extendedDynamicState3ConservativeRasterizationMode == rhs.extendedDynamicState3ConservativeRasterizationMode )
&& ( extendedDynamicState3ExtraPrimitiveOverestimationSize == rhs.extendedDynamicState3ExtraPrimitiveOverestimationSize )
&& ( extendedDynamicState3DepthClipEnable == rhs.extendedDynamicState3DepthClipEnable )
&& ( extendedDynamicState3SampleLocationsEnable == rhs.extendedDynamicState3SampleLocationsEnable )
&& ( extendedDynamicState3ColorBlendAdvanced == rhs.extendedDynamicState3ColorBlendAdvanced )
&& ( extendedDynamicState3ProvokingVertexMode == rhs.extendedDynamicState3ProvokingVertexMode )
&& ( extendedDynamicState3LineRasterizationMode == rhs.extendedDynamicState3LineRasterizationMode )
&& ( extendedDynamicState3LineStippleEnable == rhs.extendedDynamicState3LineStippleEnable )
&& ( extendedDynamicState3DepthClipNegativeOneToOne == rhs.extendedDynamicState3DepthClipNegativeOneToOne )
&& ( extendedDynamicState3ViewportWScalingEnable == rhs.extendedDynamicState3ViewportWScalingEnable )
&& ( extendedDynamicState3ViewportSwizzle == rhs.extendedDynamicState3ViewportSwizzle )
&& ( extendedDynamicState3CoverageToColorEnable == rhs.extendedDynamicState3CoverageToColorEnable )
&& ( extendedDynamicState3CoverageToColorLocation == rhs.extendedDynamicState3CoverageToColorLocation )
&& ( extendedDynamicState3CoverageModulationMode == rhs.extendedDynamicState3CoverageModulationMode )
&& ( extendedDynamicState3CoverageModulationTableEnable == rhs.extendedDynamicState3CoverageModulationTableEnable )
&& ( extendedDynamicState3CoverageModulationTable == rhs.extendedDynamicState3CoverageModulationTable )
&& ( extendedDynamicState3CoverageReductionMode == rhs.extendedDynamicState3CoverageReductionMode )
&& ( extendedDynamicState3RepresentativeFragmentTestEnable == rhs.extendedDynamicState3RepresentativeFragmentTestEnable )
&& ( extendedDynamicState3ShadingRateImageEnable == rhs.extendedDynamicState3ShadingRateImageEnable );
#endif
}
bool operator!=( PhysicalDeviceExtendedDynamicState3FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExtendedDynamicState3FeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3TessellationDomainOrigin = {};
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClampEnable = {};
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3PolygonMode = {};
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RasterizationSamples = {};
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3SampleMask = {};
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3AlphaToCoverageEnable = {};
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3AlphaToOneEnable = {};
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LogicOpEnable = {};
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendEnable = {};
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendEquation = {};
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorWriteMask = {};
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RasterizationStream = {};
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ConservativeRasterizationMode = {};
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ExtraPrimitiveOverestimationSize = {};
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClipEnable = {};
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3SampleLocationsEnable = {};
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendAdvanced = {};
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ProvokingVertexMode = {};
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LineRasterizationMode = {};
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LineStippleEnable = {};
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClipNegativeOneToOne = {};
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ViewportWScalingEnable = {};
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ViewportSwizzle = {};
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageToColorEnable = {};
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageToColorLocation = {};
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationMode = {};
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationTableEnable = {};
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationTable = {};
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageReductionMode = {};
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RepresentativeFragmentTestEnable = {};
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ShadingRateImageEnable = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceExtendedDynamicState3FeaturesEXT>
{
using Type = PhysicalDeviceExtendedDynamicState3FeaturesEXT;
};
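// Usage sketch (illustrative, assuming the default `vk` namespace alias): with
// this many independent Bool32 members, the chaining setters keep enabling a
// subset readable, and a vk::StructureChain wires the struct into the device
// create info. `physicalDevice` and the contents of the DeviceCreateInfo are
// assumptions of the example.
//
//   vk::StructureChain<vk::DeviceCreateInfo, vk::PhysicalDeviceExtendedDynamicState3FeaturesEXT> chain;
//   chain.get<vk::PhysicalDeviceExtendedDynamicState3FeaturesEXT>()
//     .setExtendedDynamicState3PolygonMode( VK_TRUE )
//     .setExtendedDynamicState3RasterizationSamples( VK_TRUE );
//   vk::Device device = physicalDevice.createDevice( chain.get<vk::DeviceCreateInfo>() );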
// wrapper struct for struct VkPhysicalDeviceExtendedDynamicState3PropertiesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExtendedDynamicState3PropertiesEXT.html
struct PhysicalDeviceExtendedDynamicState3PropertiesEXT
{
using NativeType = VkPhysicalDeviceExtendedDynamicState3PropertiesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExtendedDynamicState3PropertiesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicState3PropertiesEXT(VULKAN_HPP_NAMESPACE::Bool32 dynamicPrimitiveTopologyUnrestricted_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, dynamicPrimitiveTopologyUnrestricted{ dynamicPrimitiveTopologyUnrestricted_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicState3PropertiesEXT( PhysicalDeviceExtendedDynamicState3PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceExtendedDynamicState3PropertiesEXT( VkPhysicalDeviceExtendedDynamicState3PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceExtendedDynamicState3PropertiesEXT( *reinterpret_cast<PhysicalDeviceExtendedDynamicState3PropertiesEXT const *>( &rhs ) )
{}
PhysicalDeviceExtendedDynamicState3PropertiesEXT & operator=( PhysicalDeviceExtendedDynamicState3PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceExtendedDynamicState3PropertiesEXT & operator=( VkPhysicalDeviceExtendedDynamicState3PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3PropertiesEXT const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceExtendedDynamicState3PropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceExtendedDynamicState3PropertiesEXT*>( this );
}
operator VkPhysicalDeviceExtendedDynamicState3PropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceExtendedDynamicState3PropertiesEXT*>( this );
}
operator VkPhysicalDeviceExtendedDynamicState3PropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceExtendedDynamicState3PropertiesEXT*>( this );
}
operator VkPhysicalDeviceExtendedDynamicState3PropertiesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceExtendedDynamicState3PropertiesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, dynamicPrimitiveTopologyUnrestricted );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceExtendedDynamicState3PropertiesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceExtendedDynamicState3PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( dynamicPrimitiveTopologyUnrestricted == rhs.dynamicPrimitiveTopologyUnrestricted );
#endif
}
bool operator!=( PhysicalDeviceExtendedDynamicState3PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExtendedDynamicState3PropertiesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 dynamicPrimitiveTopologyUnrestricted = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceExtendedDynamicState3PropertiesEXT>
{
using Type = PhysicalDeviceExtendedDynamicState3PropertiesEXT;
};
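// Usage sketch (illustrative, assuming the default `vk` namespace alias and a
// valid `physicalDevice`): properties structs like the one above are read-only
// outputs of a vk::PhysicalDeviceProperties2 query, which is why they carry no
// setters:
//
//   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
//                                              vk::PhysicalDeviceExtendedDynamicState3PropertiesEXT>();
//   bool unrestricted =
//     chain.get<vk::PhysicalDeviceExtendedDynamicState3PropertiesEXT>().dynamicPrimitiveTopologyUnrestricted;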
// wrapper struct for struct VkPhysicalDeviceExtendedDynamicStateFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExtendedDynamicStateFeaturesEXT.html
struct PhysicalDeviceExtendedDynamicStateFeaturesEXT
{
using NativeType = VkPhysicalDeviceExtendedDynamicStateFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicStateFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, extendedDynamicState{ extendedDynamicState_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicStateFeaturesEXT( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceExtendedDynamicStateFeaturesEXT( VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceExtendedDynamicStateFeaturesEXT( *reinterpret_cast<PhysicalDeviceExtendedDynamicStateFeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceExtendedDynamicStateFeaturesEXT & operator=( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceExtendedDynamicStateFeaturesEXT & operator=( VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicStateFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicStateFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicStateFeaturesEXT & setExtendedDynamicState( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState_ ) VULKAN_HPP_NOEXCEPT
{
extendedDynamicState = extendedDynamicState_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceExtendedDynamicStateFeaturesEXT*>( this );
}
operator VkPhysicalDeviceExtendedDynamicStateFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceExtendedDynamicStateFeaturesEXT*>( this );
}
operator VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceExtendedDynamicStateFeaturesEXT*>( this );
}
operator VkPhysicalDeviceExtendedDynamicStateFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceExtendedDynamicStateFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, extendedDynamicState );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( extendedDynamicState == rhs.extendedDynamicState );
#endif
}
bool operator!=( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT>
{
using Type = PhysicalDeviceExtendedDynamicStateFeaturesEXT;
};
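// Usage sketch (illustrative, assuming the default `vk` namespace alias): once
// extendedDynamicState is enabled on the device, the corresponding state is set
// per command buffer instead of being baked into the pipeline; `commandBuffer`
// is an assumption of the example.
//
//   commandBuffer.setCullModeEXT( vk::CullModeFlagBits::eBack );
//   commandBuffer.setFrontFaceEXT( vk::FrontFace::eCounterClockwise );
//   commandBuffer.setPrimitiveTopologyEXT( vk::PrimitiveTopology::eTriangleList );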
// wrapper struct for struct VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV.html
struct PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV
{
using NativeType = VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExtendedSparseAddressSpaceFeaturesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 extendedSparseAddressSpace_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, extendedSparseAddressSpace{ extendedSparseAddressSpace_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV( PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV( VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV( *reinterpret_cast<PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const *>( &rhs ) )
{}
PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV & operator=( PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV & operator=( VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV & setExtendedSparseAddressSpace( VULKAN_HPP_NAMESPACE::Bool32 extendedSparseAddressSpace_ ) VULKAN_HPP_NOEXCEPT
{
extendedSparseAddressSpace = extendedSparseAddressSpace_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV*>( this );
}
operator VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV*>( this );
}
operator VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV*>( this );
}
operator VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, extendedSparseAddressSpace );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const & ) const = default;
#else
bool operator==( PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( extendedSparseAddressSpace == rhs.extendedSparseAddressSpace );
#endif
}
bool operator!=( PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExtendedSparseAddressSpaceFeaturesNV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 extendedSparseAddressSpace = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceExtendedSparseAddressSpaceFeaturesNV>
{
using Type = PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV;
};
// wrapper struct for struct VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV.html
struct PhysicalDeviceExtendedSparseAddressSpacePropertiesNV
{
using NativeType = VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExtendedSparseAddressSpacePropertiesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedSparseAddressSpacePropertiesNV(VULKAN_HPP_NAMESPACE::DeviceSize extendedSparseAddressSpaceSize_ = {}, VULKAN_HPP_NAMESPACE::ImageUsageFlags extendedSparseImageUsageFlags_ = {}, VULKAN_HPP_NAMESPACE::BufferUsageFlags extendedSparseBufferUsageFlags_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, extendedSparseAddressSpaceSize{ extendedSparseAddressSpaceSize_ }, extendedSparseImageUsageFlags{ extendedSparseImageUsageFlags_ }, extendedSparseBufferUsageFlags{ extendedSparseBufferUsageFlags_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedSparseAddressSpacePropertiesNV( PhysicalDeviceExtendedSparseAddressSpacePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceExtendedSparseAddressSpacePropertiesNV( VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceExtendedSparseAddressSpacePropertiesNV( *reinterpret_cast<PhysicalDeviceExtendedSparseAddressSpacePropertiesNV const *>( &rhs ) )
{}
PhysicalDeviceExtendedSparseAddressSpacePropertiesNV & operator=( PhysicalDeviceExtendedSparseAddressSpacePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceExtendedSparseAddressSpacePropertiesNV & operator=( VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedSparseAddressSpacePropertiesNV const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV*>( this );
}
operator VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV*>( this );
}
operator VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV*>( this );
}
operator VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::ImageUsageFlags const &, VULKAN_HPP_NAMESPACE::BufferUsageFlags const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, extendedSparseAddressSpaceSize, extendedSparseImageUsageFlags, extendedSparseBufferUsageFlags );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceExtendedSparseAddressSpacePropertiesNV const & ) const = default;
#else
bool operator==( PhysicalDeviceExtendedSparseAddressSpacePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( extendedSparseAddressSpaceSize == rhs.extendedSparseAddressSpaceSize )
&& ( extendedSparseImageUsageFlags == rhs.extendedSparseImageUsageFlags )
&& ( extendedSparseBufferUsageFlags == rhs.extendedSparseBufferUsageFlags );
#endif
}
bool operator!=( PhysicalDeviceExtendedSparseAddressSpacePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExtendedSparseAddressSpacePropertiesNV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::DeviceSize extendedSparseAddressSpaceSize = {};
VULKAN_HPP_NAMESPACE::ImageUsageFlags extendedSparseImageUsageFlags = {};
VULKAN_HPP_NAMESPACE::BufferUsageFlags extendedSparseBufferUsageFlags = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceExtendedSparseAddressSpacePropertiesNV>
{
using Type = PhysicalDeviceExtendedSparseAddressSpacePropertiesNV;
};
// wrapper struct for struct VkPhysicalDeviceExternalBufferInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExternalBufferInfo.html
struct PhysicalDeviceExternalBufferInfo
{
using NativeType = VkPhysicalDeviceExternalBufferInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalBufferInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalBufferInfo(VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, usage{ usage_ }, handleType{ handleType_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalBufferInfo( PhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceExternalBufferInfo( VkPhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceExternalBufferInfo( *reinterpret_cast<PhysicalDeviceExternalBufferInfo const *>( &rhs ) )
{}
PhysicalDeviceExternalBufferInfo & operator=( PhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceExternalBufferInfo & operator=( VkPhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo & setFlags( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo & setUsage( VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
{
usage = usage_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceExternalBufferInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo*>( this );
}
operator VkPhysicalDeviceExternalBufferInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceExternalBufferInfo*>( this );
}
operator VkPhysicalDeviceExternalBufferInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo*>( this );
}
operator VkPhysicalDeviceExternalBufferInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceExternalBufferInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::BufferCreateFlags const &, VULKAN_HPP_NAMESPACE::BufferUsageFlags const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, usage, handleType );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceExternalBufferInfo const & ) const = default;
#else
bool operator==( PhysicalDeviceExternalBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( usage == rhs.usage )
&& ( handleType == rhs.handleType );
#endif
}
bool operator!=( PhysicalDeviceExternalBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalBufferInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::BufferCreateFlags flags = {};
VULKAN_HPP_NAMESPACE::BufferUsageFlags usage = {};
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceExternalBufferInfo>
{
using Type = PhysicalDeviceExternalBufferInfo;
};
using PhysicalDeviceExternalBufferInfoKHR = PhysicalDeviceExternalBufferInfo;
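// Usage sketch (illustrative, assuming the default `vk` namespace alias and a
// valid `physicalDevice`): unlike the feature/properties structs, this one is
// an input, describing a prospective buffer whose external-memory capabilities
// are being queried:
//
//   vk::PhysicalDeviceExternalBufferInfo externalBufferInfo;
//   externalBufferInfo.setUsage( vk::BufferUsageFlagBits::eTransferSrc )
//                     .setHandleType( vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );
//   vk::ExternalBufferProperties externalBufferProperties =
//     physicalDevice.getExternalBufferProperties( externalBufferInfo );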
// wrapper struct for struct VkPhysicalDeviceExternalComputeQueuePropertiesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExternalComputeQueuePropertiesNV.html
struct PhysicalDeviceExternalComputeQueuePropertiesNV
{
using NativeType = VkPhysicalDeviceExternalComputeQueuePropertiesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalComputeQueuePropertiesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalComputeQueuePropertiesNV(uint32_t externalDataSize_ = {}, uint32_t maxExternalQueues_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, externalDataSize{ externalDataSize_ }, maxExternalQueues{ maxExternalQueues_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalComputeQueuePropertiesNV( PhysicalDeviceExternalComputeQueuePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceExternalComputeQueuePropertiesNV( VkPhysicalDeviceExternalComputeQueuePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceExternalComputeQueuePropertiesNV( *reinterpret_cast<PhysicalDeviceExternalComputeQueuePropertiesNV const *>( &rhs ) )
{}
PhysicalDeviceExternalComputeQueuePropertiesNV & operator=( PhysicalDeviceExternalComputeQueuePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceExternalComputeQueuePropertiesNV & operator=( VkPhysicalDeviceExternalComputeQueuePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalComputeQueuePropertiesNV const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceExternalComputeQueuePropertiesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceExternalComputeQueuePropertiesNV*>( this );
}
operator VkPhysicalDeviceExternalComputeQueuePropertiesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceExternalComputeQueuePropertiesNV*>( this );
}
operator VkPhysicalDeviceExternalComputeQueuePropertiesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceExternalComputeQueuePropertiesNV*>( this );
}
operator VkPhysicalDeviceExternalComputeQueuePropertiesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceExternalComputeQueuePropertiesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, externalDataSize, maxExternalQueues );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceExternalComputeQueuePropertiesNV const & ) const = default;
#else
bool operator==( PhysicalDeviceExternalComputeQueuePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( externalDataSize == rhs.externalDataSize )
&& ( maxExternalQueues == rhs.maxExternalQueues );
#endif
}
bool operator!=( PhysicalDeviceExternalComputeQueuePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalComputeQueuePropertiesNV;
void * pNext = {};
uint32_t externalDataSize = {};
uint32_t maxExternalQueues = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceExternalComputeQueuePropertiesNV>
{
using Type = PhysicalDeviceExternalComputeQueuePropertiesNV;
};
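  // Usage sketch (comment only): read-only properties structs like this one are filled by chaining them
  // behind vk::PhysicalDeviceProperties2. A sketch assuming VK_NV_external_compute_queue support, the
  // default `vk` namespace, and a hypothetical `physicalDevice` handle:
  //
  //   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
  //                                              vk::PhysicalDeviceExternalComputeQueuePropertiesNV>();
  //   uint32_t maxQueues = chain.get<vk::PhysicalDeviceExternalComputeQueuePropertiesNV>().maxExternalQueues;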
// wrapper struct for struct VkPhysicalDeviceExternalFenceInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExternalFenceInfo.html
struct PhysicalDeviceExternalFenceInfo
{
using NativeType = VkPhysicalDeviceExternalFenceInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalFenceInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFenceInfo(VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, handleType{ handleType_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFenceInfo( PhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceExternalFenceInfo( VkPhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceExternalFenceInfo( *reinterpret_cast<PhysicalDeviceExternalFenceInfo const *>( &rhs ) )
{}
PhysicalDeviceExternalFenceInfo & operator=( PhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceExternalFenceInfo & operator=( VkPhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalFenceInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalFenceInfo & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceExternalFenceInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo*>( this );
}
operator VkPhysicalDeviceExternalFenceInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceExternalFenceInfo*>( this );
}
operator VkPhysicalDeviceExternalFenceInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo*>( this );
}
operator VkPhysicalDeviceExternalFenceInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceExternalFenceInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, handleType );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceExternalFenceInfo const & ) const = default;
#else
bool operator==( PhysicalDeviceExternalFenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( handleType == rhs.handleType );
#endif
}
bool operator!=( PhysicalDeviceExternalFenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalFenceInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceExternalFenceInfo>
{
using Type = PhysicalDeviceExternalFenceInfo;
};
using PhysicalDeviceExternalFenceInfoKHR = PhysicalDeviceExternalFenceInfo;
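  // Usage sketch (comment only): the analogous external-handle capability query for fences. Assumes the
  // default `vk` namespace and a hypothetical `physicalDevice` handle:
  //
  //   vk::PhysicalDeviceExternalFenceInfo fenceInfo{ vk::ExternalFenceHandleTypeFlagBits::eSyncFd };
  //   vk::ExternalFenceProperties fenceProps = physicalDevice.getExternalFenceProperties( fenceInfo );
  //   bool importable = static_cast<bool>( fenceProps.externalFenceFeatures
  //                                        & vk::ExternalFenceFeatureFlagBits::eImportable );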
#if defined( VK_USE_PLATFORM_ANDROID_KHR )
// wrapper struct for struct VkPhysicalDeviceExternalFormatResolveFeaturesANDROID, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExternalFormatResolveFeaturesANDROID.html
struct PhysicalDeviceExternalFormatResolveFeaturesANDROID
{
using NativeType = VkPhysicalDeviceExternalFormatResolveFeaturesANDROID;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalFormatResolveFeaturesANDROID;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFormatResolveFeaturesANDROID(VULKAN_HPP_NAMESPACE::Bool32 externalFormatResolve_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, externalFormatResolve{ externalFormatResolve_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFormatResolveFeaturesANDROID( PhysicalDeviceExternalFormatResolveFeaturesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceExternalFormatResolveFeaturesANDROID( VkPhysicalDeviceExternalFormatResolveFeaturesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceExternalFormatResolveFeaturesANDROID( *reinterpret_cast<PhysicalDeviceExternalFormatResolveFeaturesANDROID const *>( &rhs ) )
{}
PhysicalDeviceExternalFormatResolveFeaturesANDROID & operator=( PhysicalDeviceExternalFormatResolveFeaturesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceExternalFormatResolveFeaturesANDROID & operator=( VkPhysicalDeviceExternalFormatResolveFeaturesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFormatResolveFeaturesANDROID const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalFormatResolveFeaturesANDROID & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalFormatResolveFeaturesANDROID & setExternalFormatResolve( VULKAN_HPP_NAMESPACE::Bool32 externalFormatResolve_ ) VULKAN_HPP_NOEXCEPT
{
externalFormatResolve = externalFormatResolve_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceExternalFormatResolveFeaturesANDROID const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceExternalFormatResolveFeaturesANDROID*>( this );
}
operator VkPhysicalDeviceExternalFormatResolveFeaturesANDROID &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceExternalFormatResolveFeaturesANDROID*>( this );
}
operator VkPhysicalDeviceExternalFormatResolveFeaturesANDROID const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceExternalFormatResolveFeaturesANDROID*>( this );
}
operator VkPhysicalDeviceExternalFormatResolveFeaturesANDROID *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceExternalFormatResolveFeaturesANDROID*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, externalFormatResolve );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceExternalFormatResolveFeaturesANDROID const & ) const = default;
#else
bool operator==( PhysicalDeviceExternalFormatResolveFeaturesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( externalFormatResolve == rhs.externalFormatResolve );
#endif
}
bool operator!=( PhysicalDeviceExternalFormatResolveFeaturesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalFormatResolveFeaturesANDROID;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 externalFormatResolve = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceExternalFormatResolveFeaturesANDROID>
{
using Type = PhysicalDeviceExternalFormatResolveFeaturesANDROID;
};
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
#if defined( VK_USE_PLATFORM_ANDROID_KHR )
// wrapper struct for struct VkPhysicalDeviceExternalFormatResolvePropertiesANDROID, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExternalFormatResolvePropertiesANDROID.html
struct PhysicalDeviceExternalFormatResolvePropertiesANDROID
{
using NativeType = VkPhysicalDeviceExternalFormatResolvePropertiesANDROID;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalFormatResolvePropertiesANDROID;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFormatResolvePropertiesANDROID(VULKAN_HPP_NAMESPACE::Bool32 nullColorAttachmentWithExternalFormatResolve_ = {}, VULKAN_HPP_NAMESPACE::ChromaLocation externalFormatResolveChromaOffsetX_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, VULKAN_HPP_NAMESPACE::ChromaLocation externalFormatResolveChromaOffsetY_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, nullColorAttachmentWithExternalFormatResolve{ nullColorAttachmentWithExternalFormatResolve_ }, externalFormatResolveChromaOffsetX{ externalFormatResolveChromaOffsetX_ }, externalFormatResolveChromaOffsetY{ externalFormatResolveChromaOffsetY_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFormatResolvePropertiesANDROID( PhysicalDeviceExternalFormatResolvePropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceExternalFormatResolvePropertiesANDROID( VkPhysicalDeviceExternalFormatResolvePropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceExternalFormatResolvePropertiesANDROID( *reinterpret_cast<PhysicalDeviceExternalFormatResolvePropertiesANDROID const *>( &rhs ) )
{}
PhysicalDeviceExternalFormatResolvePropertiesANDROID & operator=( PhysicalDeviceExternalFormatResolvePropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceExternalFormatResolvePropertiesANDROID & operator=( VkPhysicalDeviceExternalFormatResolvePropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFormatResolvePropertiesANDROID const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceExternalFormatResolvePropertiesANDROID const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceExternalFormatResolvePropertiesANDROID*>( this );
}
operator VkPhysicalDeviceExternalFormatResolvePropertiesANDROID &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceExternalFormatResolvePropertiesANDROID*>( this );
}
operator VkPhysicalDeviceExternalFormatResolvePropertiesANDROID const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceExternalFormatResolvePropertiesANDROID*>( this );
}
operator VkPhysicalDeviceExternalFormatResolvePropertiesANDROID *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceExternalFormatResolvePropertiesANDROID*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::ChromaLocation const &, VULKAN_HPP_NAMESPACE::ChromaLocation const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, nullColorAttachmentWithExternalFormatResolve, externalFormatResolveChromaOffsetX, externalFormatResolveChromaOffsetY );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceExternalFormatResolvePropertiesANDROID const & ) const = default;
#else
bool operator==( PhysicalDeviceExternalFormatResolvePropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( nullColorAttachmentWithExternalFormatResolve == rhs.nullColorAttachmentWithExternalFormatResolve )
&& ( externalFormatResolveChromaOffsetX == rhs.externalFormatResolveChromaOffsetX )
&& ( externalFormatResolveChromaOffsetY == rhs.externalFormatResolveChromaOffsetY );
#endif
}
bool operator!=( PhysicalDeviceExternalFormatResolvePropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalFormatResolvePropertiesANDROID;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 nullColorAttachmentWithExternalFormatResolve = {};
VULKAN_HPP_NAMESPACE::ChromaLocation externalFormatResolveChromaOffsetX = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
VULKAN_HPP_NAMESPACE::ChromaLocation externalFormatResolveChromaOffsetY = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceExternalFormatResolvePropertiesANDROID>
{
using Type = PhysicalDeviceExternalFormatResolvePropertiesANDROID;
};
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
// wrapper struct for struct VkPhysicalDeviceExternalImageFormatInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExternalImageFormatInfo.html
struct PhysicalDeviceExternalImageFormatInfo
{
using NativeType = VkPhysicalDeviceExternalImageFormatInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalImageFormatInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalImageFormatInfo(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, handleType{ handleType_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalImageFormatInfo( PhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceExternalImageFormatInfo( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceExternalImageFormatInfo( *reinterpret_cast<PhysicalDeviceExternalImageFormatInfo const *>( &rhs ) )
{}
PhysicalDeviceExternalImageFormatInfo & operator=( PhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceExternalImageFormatInfo & operator=( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalImageFormatInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalImageFormatInfo & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceExternalImageFormatInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceExternalImageFormatInfo*>( this );
}
operator VkPhysicalDeviceExternalImageFormatInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceExternalImageFormatInfo*>( this );
}
operator VkPhysicalDeviceExternalImageFormatInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceExternalImageFormatInfo*>( this );
}
operator VkPhysicalDeviceExternalImageFormatInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceExternalImageFormatInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, handleType );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceExternalImageFormatInfo const & ) const = default;
#else
bool operator==( PhysicalDeviceExternalImageFormatInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( handleType == rhs.handleType );
#endif
}
bool operator!=( PhysicalDeviceExternalImageFormatInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalImageFormatInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceExternalImageFormatInfo>
{
using Type = PhysicalDeviceExternalImageFormatInfo;
};
using PhysicalDeviceExternalImageFormatInfoKHR = PhysicalDeviceExternalImageFormatInfo;
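  // Usage sketch (comment only): this struct extends vk::PhysicalDeviceImageFormatInfo2 via its pNext
  // chain so that getImageFormatProperties2 also reports external-memory capabilities. A sketch assuming
  // the default `vk` namespace and a hypothetical `physicalDevice` handle; note that in the default
  // exceptions configuration this call throws if the format combination is unsupported:
  //
  //   vk::StructureChain<vk::PhysicalDeviceImageFormatInfo2, vk::PhysicalDeviceExternalImageFormatInfo> infoChain{
  //     vk::PhysicalDeviceImageFormatInfo2{ vk::Format::eR8G8B8A8Unorm, vk::ImageType::e2D,
  //                                         vk::ImageTiling::eOptimal, vk::ImageUsageFlagBits::eSampled },
  //     vk::PhysicalDeviceExternalImageFormatInfo{ vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd } };
  //   auto propsChain = physicalDevice.getImageFormatProperties2<vk::ImageFormatProperties2,
  //                                                              vk::ExternalImageFormatProperties>(
  //     infoChain.get<vk::PhysicalDeviceImageFormatInfo2>() );
  //   auto const & extProps = propsChain.get<vk::ExternalImageFormatProperties>().externalMemoryProperties;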
// wrapper struct for struct VkPhysicalDeviceExternalMemoryHostPropertiesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExternalMemoryHostPropertiesEXT.html
struct PhysicalDeviceExternalMemoryHostPropertiesEXT
{
using NativeType = VkPhysicalDeviceExternalMemoryHostPropertiesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryHostPropertiesEXT(VULKAN_HPP_NAMESPACE::DeviceSize minImportedHostPointerAlignment_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, minImportedHostPointerAlignment{ minImportedHostPointerAlignment_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryHostPropertiesEXT( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceExternalMemoryHostPropertiesEXT( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceExternalMemoryHostPropertiesEXT( *reinterpret_cast<PhysicalDeviceExternalMemoryHostPropertiesEXT const *>( &rhs ) )
{}
PhysicalDeviceExternalMemoryHostPropertiesEXT & operator=( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceExternalMemoryHostPropertiesEXT & operator=( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryHostPropertiesEXT const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceExternalMemoryHostPropertiesEXT*>( this );
}
operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceExternalMemoryHostPropertiesEXT*>( this );
}
operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceExternalMemoryHostPropertiesEXT*>( this );
}
operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceExternalMemoryHostPropertiesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, minImportedHostPointerAlignment );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceExternalMemoryHostPropertiesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( minImportedHostPointerAlignment == rhs.minImportedHostPointerAlignment );
#endif
}
bool operator!=( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::DeviceSize minImportedHostPointerAlignment = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT>
{
using Type = PhysicalDeviceExternalMemoryHostPropertiesEXT;
};
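  // Usage sketch (comment only): the single limit reported here constrains host pointers imported through
  // VK_EXT_external_memory_host. A sketch assuming the default `vk` namespace and a hypothetical
  // `physicalDevice` handle:
  //
  //   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
  //                                              vk::PhysicalDeviceExternalMemoryHostPropertiesEXT>();
  //   vk::DeviceSize alignment =
  //     chain.get<vk::PhysicalDeviceExternalMemoryHostPropertiesEXT>().minImportedHostPointerAlignment;
  //   // pointers passed to the import must be aligned to, and sized in multiples of, this value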
// wrapper struct for struct VkPhysicalDeviceExternalMemoryRDMAFeaturesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExternalMemoryRDMAFeaturesNV.html
struct PhysicalDeviceExternalMemoryRDMAFeaturesNV
{
using NativeType = VkPhysicalDeviceExternalMemoryRDMAFeaturesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalMemoryRdmaFeaturesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryRDMAFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 externalMemoryRDMA_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, externalMemoryRDMA{ externalMemoryRDMA_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryRDMAFeaturesNV( PhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceExternalMemoryRDMAFeaturesNV( VkPhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceExternalMemoryRDMAFeaturesNV( *reinterpret_cast<PhysicalDeviceExternalMemoryRDMAFeaturesNV const *>( &rhs ) )
{}
PhysicalDeviceExternalMemoryRDMAFeaturesNV & operator=( PhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceExternalMemoryRDMAFeaturesNV & operator=( VkPhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryRDMAFeaturesNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalMemoryRDMAFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalMemoryRDMAFeaturesNV & setExternalMemoryRDMA( VULKAN_HPP_NAMESPACE::Bool32 externalMemoryRDMA_ ) VULKAN_HPP_NOEXCEPT
{
externalMemoryRDMA = externalMemoryRDMA_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceExternalMemoryRDMAFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceExternalMemoryRDMAFeaturesNV*>( this );
}
operator VkPhysicalDeviceExternalMemoryRDMAFeaturesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceExternalMemoryRDMAFeaturesNV*>( this );
}
operator VkPhysicalDeviceExternalMemoryRDMAFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceExternalMemoryRDMAFeaturesNV*>( this );
}
operator VkPhysicalDeviceExternalMemoryRDMAFeaturesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceExternalMemoryRDMAFeaturesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, externalMemoryRDMA );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceExternalMemoryRDMAFeaturesNV const & ) const = default;
#else
bool operator==( PhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( externalMemoryRDMA == rhs.externalMemoryRDMA );
#endif
}
bool operator!=( PhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalMemoryRdmaFeaturesNV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 externalMemoryRDMA = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceExternalMemoryRdmaFeaturesNV>
{
using Type = PhysicalDeviceExternalMemoryRDMAFeaturesNV;
};
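  // Usage sketch (comment only): single-Bool32 feature structs such as this one are queried by chaining
  // them behind vk::PhysicalDeviceFeatures2; the same pattern applies to the other feature structs in this
  // header. A sketch assuming the default `vk` namespace and a hypothetical `physicalDevice` handle:
  //
  //   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
  //                                            vk::PhysicalDeviceExternalMemoryRDMAFeaturesNV>();
  //   bool rdmaSupported =
  //     static_cast<bool>( chain.get<vk::PhysicalDeviceExternalMemoryRDMAFeaturesNV>().externalMemoryRDMA );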
#if defined( VK_USE_PLATFORM_SCREEN_QNX )
// wrapper struct for struct VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX.html
struct PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX
{
using NativeType = VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalMemoryScreenBufferFeaturesQNX;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX(VULKAN_HPP_NAMESPACE::Bool32 screenBufferImport_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, screenBufferImport{ screenBufferImport_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX( PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX( VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX( *reinterpret_cast<PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const *>( &rhs ) )
{}
PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX & operator=( PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX & operator=( VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX & setScreenBufferImport( VULKAN_HPP_NAMESPACE::Bool32 screenBufferImport_ ) VULKAN_HPP_NOEXCEPT
{
screenBufferImport = screenBufferImport_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX*>( this );
}
operator VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX*>( this );
}
operator VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX*>( this );
}
operator VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, screenBufferImport );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const & ) const = default;
#else
bool operator==( PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( screenBufferImport == rhs.screenBufferImport );
#endif
}
bool operator!=( PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalMemoryScreenBufferFeaturesQNX;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 screenBufferImport = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceExternalMemoryScreenBufferFeaturesQNX>
{
using Type = PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX;
};
#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
// wrapper struct for struct VkPhysicalDeviceExternalSemaphoreInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExternalSemaphoreInfo.html
struct PhysicalDeviceExternalSemaphoreInfo
{
using NativeType = VkPhysicalDeviceExternalSemaphoreInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalSemaphoreInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalSemaphoreInfo(VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, handleType{ handleType_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalSemaphoreInfo( PhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceExternalSemaphoreInfo( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceExternalSemaphoreInfo( *reinterpret_cast<PhysicalDeviceExternalSemaphoreInfo const *>( &rhs ) )
{}
PhysicalDeviceExternalSemaphoreInfo & operator=( PhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceExternalSemaphoreInfo & operator=( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalSemaphoreInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalSemaphoreInfo & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceExternalSemaphoreInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo*>( this );
}
operator VkPhysicalDeviceExternalSemaphoreInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceExternalSemaphoreInfo*>( this );
}
operator VkPhysicalDeviceExternalSemaphoreInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo*>( this );
}
operator VkPhysicalDeviceExternalSemaphoreInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceExternalSemaphoreInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, handleType );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceExternalSemaphoreInfo const & ) const = default;
#else
bool operator==( PhysicalDeviceExternalSemaphoreInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( handleType == rhs.handleType );
#endif
}
bool operator!=( PhysicalDeviceExternalSemaphoreInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalSemaphoreInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceExternalSemaphoreInfo>
{
using Type = PhysicalDeviceExternalSemaphoreInfo;
};
using PhysicalDeviceExternalSemaphoreInfoKHR = PhysicalDeviceExternalSemaphoreInfo;
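  // Usage sketch (comment only): completes the trio of external-handle capability queries (buffer, fence,
  // semaphore). Assumes the default `vk` namespace and a hypothetical `physicalDevice` handle:
  //
  //   vk::PhysicalDeviceExternalSemaphoreInfo semaphoreInfo{ vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd };
  //   vk::ExternalSemaphoreProperties semaphoreProps = physicalDevice.getExternalSemaphoreProperties( semaphoreInfo );
  //   bool exportable = static_cast<bool>( semaphoreProps.externalSemaphoreFeatures
  //                                        & vk::ExternalSemaphoreFeatureFlagBits::eExportable );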
// wrapper struct for struct VkPhysicalDeviceExternalTensorInfoARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExternalTensorInfoARM.html
struct PhysicalDeviceExternalTensorInfoARM
{
using NativeType = VkPhysicalDeviceExternalTensorInfoARM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalTensorInfoARM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalTensorInfoARM(VULKAN_HPP_NAMESPACE::TensorCreateFlagsARM flags_ = {}, const VULKAN_HPP_NAMESPACE::TensorDescriptionARM * pDescription_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, pDescription{ pDescription_ }, handleType{ handleType_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalTensorInfoARM( PhysicalDeviceExternalTensorInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceExternalTensorInfoARM( VkPhysicalDeviceExternalTensorInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceExternalTensorInfoARM( *reinterpret_cast<PhysicalDeviceExternalTensorInfoARM const *>( &rhs ) )
{}
PhysicalDeviceExternalTensorInfoARM & operator=( PhysicalDeviceExternalTensorInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceExternalTensorInfoARM & operator=( VkPhysicalDeviceExternalTensorInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalTensorInfoARM const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalTensorInfoARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalTensorInfoARM & setFlags( VULKAN_HPP_NAMESPACE::TensorCreateFlagsARM flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalTensorInfoARM & setPDescription( const VULKAN_HPP_NAMESPACE::TensorDescriptionARM * pDescription_ ) VULKAN_HPP_NOEXCEPT
{
pDescription = pDescription_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalTensorInfoARM & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceExternalTensorInfoARM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceExternalTensorInfoARM*>( this );
}
operator VkPhysicalDeviceExternalTensorInfoARM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceExternalTensorInfoARM*>( this );
}
operator VkPhysicalDeviceExternalTensorInfoARM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceExternalTensorInfoARM*>( this );
}
operator VkPhysicalDeviceExternalTensorInfoARM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceExternalTensorInfoARM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::TensorCreateFlagsARM const &, const VULKAN_HPP_NAMESPACE::TensorDescriptionARM * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, pDescription, handleType );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceExternalTensorInfoARM const & ) const = default;
#else
bool operator==( PhysicalDeviceExternalTensorInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( pDescription == rhs.pDescription )
&& ( handleType == rhs.handleType );
#endif
}
bool operator!=( PhysicalDeviceExternalTensorInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalTensorInfoARM;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::TensorCreateFlagsARM flags = {};
const VULKAN_HPP_NAMESPACE::TensorDescriptionARM * pDescription = {};
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceExternalTensorInfoARM>
{
using Type = PhysicalDeviceExternalTensorInfoARM;
};
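  // Usage sketch (comment only, hedged): the setters above return *this, so the struct supports
  // builder-style chaining. It is the input to the VK_ARM_tensors external-tensor capability query; that
  // query's entry point is declared elsewhere, so the call named below is an assumption, and
  // `description` / `physicalDevice` are hypothetical names:
  //
  //   vk::TensorDescriptionARM description{};  // filled in per VK_ARM_tensors
  //   vk::PhysicalDeviceExternalTensorInfoARM tensorInfo{};
  //   tensorInfo.setPDescription( &description )
  //             .setHandleType( vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );
  //   // assumed: physicalDevice.getExternalTensorPropertiesARM( tensorInfo )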
// wrapper struct for struct VkPhysicalDeviceFaultFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFaultFeaturesEXT.html
struct PhysicalDeviceFaultFeaturesEXT
{
using NativeType = VkPhysicalDeviceFaultFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFaultFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceFaultFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 deviceFault_ = {}, VULKAN_HPP_NAMESPACE::Bool32 deviceFaultVendorBinary_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, deviceFault{ deviceFault_ }, deviceFaultVendorBinary{ deviceFaultVendorBinary_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceFaultFeaturesEXT( PhysicalDeviceFaultFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceFaultFeaturesEXT( VkPhysicalDeviceFaultFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceFaultFeaturesEXT( *reinterpret_cast<PhysicalDeviceFaultFeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceFaultFeaturesEXT & operator=( PhysicalDeviceFaultFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceFaultFeaturesEXT & operator=( VkPhysicalDeviceFaultFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFaultFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFaultFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFaultFeaturesEXT & setDeviceFault( VULKAN_HPP_NAMESPACE::Bool32 deviceFault_ ) VULKAN_HPP_NOEXCEPT
{
deviceFault = deviceFault_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFaultFeaturesEXT & setDeviceFaultVendorBinary( VULKAN_HPP_NAMESPACE::Bool32 deviceFaultVendorBinary_ ) VULKAN_HPP_NOEXCEPT
{
deviceFaultVendorBinary = deviceFaultVendorBinary_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceFaultFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceFaultFeaturesEXT*>( this );
}
operator VkPhysicalDeviceFaultFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceFaultFeaturesEXT*>( this );
}
operator VkPhysicalDeviceFaultFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceFaultFeaturesEXT*>( this );
}
operator VkPhysicalDeviceFaultFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceFaultFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, deviceFault, deviceFaultVendorBinary );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceFaultFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceFaultFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( deviceFault == rhs.deviceFault )
&& ( deviceFaultVendorBinary == rhs.deviceFaultVendorBinary );
#endif
}
bool operator!=( PhysicalDeviceFaultFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFaultFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 deviceFault = {};
VULKAN_HPP_NAMESPACE::Bool32 deviceFaultVendorBinary = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceFaultFeaturesEXT>
{
using Type = PhysicalDeviceFaultFeaturesEXT;
};
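  // Usage sketch (comment only): feature structs are also how features get enabled: query first, then
  // chain the struct into vk::DeviceCreateInfo::pNext at device creation. Assumes the default `vk`
  // namespace and a hypothetical `physicalDevice` handle:
  //
  //   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2, vk::PhysicalDeviceFaultFeaturesEXT>();
  //   vk::PhysicalDeviceFaultFeaturesEXT faultFeatures = chain.get<vk::PhysicalDeviceFaultFeaturesEXT>();
  //   faultFeatures.setPNext( nullptr );  // the copied struct still points into the query chain; detach it
  //   vk::DeviceCreateInfo deviceCreateInfo{};
  //   deviceCreateInfo.setPNext( &faultFeatures );  // plus queue create infos etc. as usual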
// wrapper struct for struct VkPhysicalDeviceFeatures2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFeatures2.html
struct PhysicalDeviceFeatures2
{
using NativeType = VkPhysicalDeviceFeatures2;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFeatures2;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures2(VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, features{ features_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures2( PhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceFeatures2( VkPhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceFeatures2( *reinterpret_cast<PhysicalDeviceFeatures2 const *>( &rhs ) )
{}
PhysicalDeviceFeatures2 & operator=( PhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceFeatures2 & operator=( VkPhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures2 & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures2 & setFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures const & features_ ) VULKAN_HPP_NOEXCEPT
{
features = features_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceFeatures2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceFeatures2*>( this );
}
operator VkPhysicalDeviceFeatures2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceFeatures2*>( this );
}
operator VkPhysicalDeviceFeatures2 const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceFeatures2*>( this );
}
operator VkPhysicalDeviceFeatures2 *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceFeatures2*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, features );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceFeatures2 const & ) const = default;
#else
bool operator==( PhysicalDeviceFeatures2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( features == rhs.features );
#endif
}
bool operator!=( PhysicalDeviceFeatures2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFeatures2;
void * pNext = {};
VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceFeatures2>
{
using Type = PhysicalDeviceFeatures2;
};
using PhysicalDeviceFeatures2KHR = PhysicalDeviceFeatures2;
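  // Usage sketch (comment only): PhysicalDeviceFeatures2 is the root of the features pNext chain, both
  // when querying and when creating a device. Assumes the default `vk` namespace and a hypothetical
  // `physicalDevice` handle:
  //
  //   vk::PhysicalDeviceFeatures2 features2 = physicalDevice.getFeatures2();
  //   vk::DeviceCreateInfo deviceCreateInfo{};
  //   deviceCreateInfo.setPNext( &features2 );
  //   // when Features2 is chained into DeviceCreateInfo::pNext, pEnabledFeatures must remain nullptr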
// wrapper struct for struct VkPhysicalDeviceFloatControlsProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFloatControlsProperties.html
struct PhysicalDeviceFloatControlsProperties
{
using NativeType = VkPhysicalDeviceFloatControlsProperties;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFloatControlsProperties;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceFloatControlsProperties(VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, denormBehaviorIndependence{ denormBehaviorIndependence_ }, roundingModeIndependence{ roundingModeIndependence_ }, shaderSignedZeroInfNanPreserveFloat16{ shaderSignedZeroInfNanPreserveFloat16_ }, shaderSignedZeroInfNanPreserveFloat32{ shaderSignedZeroInfNanPreserveFloat32_ }, shaderSignedZeroInfNanPreserveFloat64{ shaderSignedZeroInfNanPreserveFloat64_ }, shaderDenormPreserveFloat16{ shaderDenormPreserveFloat16_ }, shaderDenormPreserveFloat32{ shaderDenormPreserveFloat32_ }, shaderDenormPreserveFloat64{ shaderDenormPreserveFloat64_ }, shaderDenormFlushToZeroFloat16{ shaderDenormFlushToZeroFloat16_ }, shaderDenormFlushToZeroFloat32{ shaderDenormFlushToZeroFloat32_ }, shaderDenormFlushToZeroFloat64{ shaderDenormFlushToZeroFloat64_ }, shaderRoundingModeRTEFloat16{ shaderRoundingModeRTEFloat16_ }, shaderRoundingModeRTEFloat32{ shaderRoundingModeRTEFloat32_ }, shaderRoundingModeRTEFloat64{ shaderRoundingModeRTEFloat64_ }, shaderRoundingModeRTZFloat16{ shaderRoundingModeRTZFloat16_ }, shaderRoundingModeRTZFloat32{ shaderRoundingModeRTZFloat32_ }, shaderRoundingModeRTZFloat64{ shaderRoundingModeRTZFloat64_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceFloatControlsProperties( PhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceFloatControlsProperties( VkPhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceFloatControlsProperties( *reinterpret_cast<PhysicalDeviceFloatControlsProperties const *>( &rhs ) )
{}
PhysicalDeviceFloatControlsProperties & operator=( PhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceFloatControlsProperties & operator=( VkPhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsProperties const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceFloatControlsProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceFloatControlsProperties*>( this );
}
operator VkPhysicalDeviceFloatControlsProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceFloatControlsProperties*>( this );
}
operator VkPhysicalDeviceFloatControlsProperties const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceFloatControlsProperties*>( this );
}
operator VkPhysicalDeviceFloatControlsProperties *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceFloatControlsProperties*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence const &, VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, denormBehaviorIndependence, roundingModeIndependence, shaderSignedZeroInfNanPreserveFloat16, shaderSignedZeroInfNanPreserveFloat32, shaderSignedZeroInfNanPreserveFloat64, shaderDenormPreserveFloat16, shaderDenormPreserveFloat32, shaderDenormPreserveFloat64, shaderDenormFlushToZeroFloat16, shaderDenormFlushToZeroFloat32, shaderDenormFlushToZeroFloat64, shaderRoundingModeRTEFloat16, shaderRoundingModeRTEFloat32, shaderRoundingModeRTEFloat64, shaderRoundingModeRTZFloat16, shaderRoundingModeRTZFloat32, shaderRoundingModeRTZFloat64 );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceFloatControlsProperties const & ) const = default;
#else
bool operator==( PhysicalDeviceFloatControlsProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( denormBehaviorIndependence == rhs.denormBehaviorIndependence )
&& ( roundingModeIndependence == rhs.roundingModeIndependence )
&& ( shaderSignedZeroInfNanPreserveFloat16 == rhs.shaderSignedZeroInfNanPreserveFloat16 )
&& ( shaderSignedZeroInfNanPreserveFloat32 == rhs.shaderSignedZeroInfNanPreserveFloat32 )
&& ( shaderSignedZeroInfNanPreserveFloat64 == rhs.shaderSignedZeroInfNanPreserveFloat64 )
&& ( shaderDenormPreserveFloat16 == rhs.shaderDenormPreserveFloat16 )
&& ( shaderDenormPreserveFloat32 == rhs.shaderDenormPreserveFloat32 )
&& ( shaderDenormPreserveFloat64 == rhs.shaderDenormPreserveFloat64 )
&& ( shaderDenormFlushToZeroFloat16 == rhs.shaderDenormFlushToZeroFloat16 )
&& ( shaderDenormFlushToZeroFloat32 == rhs.shaderDenormFlushToZeroFloat32 )
&& ( shaderDenormFlushToZeroFloat64 == rhs.shaderDenormFlushToZeroFloat64 )
&& ( shaderRoundingModeRTEFloat16 == rhs.shaderRoundingModeRTEFloat16 )
&& ( shaderRoundingModeRTEFloat32 == rhs.shaderRoundingModeRTEFloat32 )
&& ( shaderRoundingModeRTEFloat64 == rhs.shaderRoundingModeRTEFloat64 )
&& ( shaderRoundingModeRTZFloat16 == rhs.shaderRoundingModeRTZFloat16 )
&& ( shaderRoundingModeRTZFloat32 == rhs.shaderRoundingModeRTZFloat32 )
&& ( shaderRoundingModeRTZFloat64 == rhs.shaderRoundingModeRTZFloat64 );
#endif
}
bool operator!=( PhysicalDeviceFloatControlsProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFloatControlsProperties;
void * pNext = {};
VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly;
VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly;
VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16 = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32 = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64 = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16 = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32 = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64 = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16 = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32 = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64 = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16 = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32 = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64 = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16 = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32 = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64 = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceFloatControlsProperties>
{
using Type = PhysicalDeviceFloatControlsProperties;
};
using PhysicalDeviceFloatControlsPropertiesKHR = PhysicalDeviceFloatControlsProperties;
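  // Illustrative usage sketch (not part of the generated API): these limits are read-only and are
  // queried by chaining the struct into a PhysicalDeviceProperties2 query. `physicalDevice` below is
  // an assumed VULKAN_HPP_NAMESPACE::PhysicalDevice obtained elsewhere.
  //   auto chain = physicalDevice.getProperties2<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2,
  //                                              VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsProperties>();
  //   auto const & floatControls = chain.get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsProperties>();
  //   bool rte16 = ( floatControls.shaderRoundingModeRTEFloat16 == VK_TRUE );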
// wrapper struct for struct VkPhysicalDeviceFormatPackFeaturesARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFormatPackFeaturesARM.html
struct PhysicalDeviceFormatPackFeaturesARM
{
using NativeType = VkPhysicalDeviceFormatPackFeaturesARM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFormatPackFeaturesARM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceFormatPackFeaturesARM(VULKAN_HPP_NAMESPACE::Bool32 formatPack_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, formatPack{ formatPack_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceFormatPackFeaturesARM( PhysicalDeviceFormatPackFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceFormatPackFeaturesARM( VkPhysicalDeviceFormatPackFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceFormatPackFeaturesARM( *reinterpret_cast<PhysicalDeviceFormatPackFeaturesARM const *>( &rhs ) )
{}
PhysicalDeviceFormatPackFeaturesARM & operator=( PhysicalDeviceFormatPackFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceFormatPackFeaturesARM & operator=( VkPhysicalDeviceFormatPackFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFormatPackFeaturesARM const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFormatPackFeaturesARM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFormatPackFeaturesARM & setFormatPack( VULKAN_HPP_NAMESPACE::Bool32 formatPack_ ) VULKAN_HPP_NOEXCEPT
{
formatPack = formatPack_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceFormatPackFeaturesARM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceFormatPackFeaturesARM*>( this );
}
operator VkPhysicalDeviceFormatPackFeaturesARM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceFormatPackFeaturesARM*>( this );
}
operator VkPhysicalDeviceFormatPackFeaturesARM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceFormatPackFeaturesARM*>( this );
}
operator VkPhysicalDeviceFormatPackFeaturesARM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceFormatPackFeaturesARM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, formatPack );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceFormatPackFeaturesARM const & ) const = default;
#else
bool operator==( PhysicalDeviceFormatPackFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( formatPack == rhs.formatPack );
#endif
}
bool operator!=( PhysicalDeviceFormatPackFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFormatPackFeaturesARM;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 formatPack = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceFormatPackFeaturesARM>
{
using Type = PhysicalDeviceFormatPackFeaturesARM;
};
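  // Illustrative usage sketch (not part of the generated API): feature availability is queried via
  // PhysicalDeviceFeatures2, and the filled struct is then chained into DeviceCreateInfo::pNext to
  // enable the feature. `physicalDevice` and `deviceCreateInfo` are assumed to exist elsewhere.
  //   auto chain = physicalDevice.getFeatures2<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2,
  //                                            VULKAN_HPP_NAMESPACE::PhysicalDeviceFormatPackFeaturesARM>();
  //   auto formatPackFeatures = chain.get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFormatPackFeaturesARM>();
  //   deviceCreateInfo.setPNext( &formatPackFeatures );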
// wrapper struct for struct VkPhysicalDeviceFragmentDensityMap2FeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFragmentDensityMap2FeaturesEXT.html
struct PhysicalDeviceFragmentDensityMap2FeaturesEXT
{
using NativeType = VkPhysicalDeviceFragmentDensityMap2FeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2FeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDeferred_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, fragmentDensityMapDeferred{ fragmentDensityMapDeferred_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2FeaturesEXT( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceFragmentDensityMap2FeaturesEXT( VkPhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceFragmentDensityMap2FeaturesEXT( *reinterpret_cast<PhysicalDeviceFragmentDensityMap2FeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceFragmentDensityMap2FeaturesEXT & operator=( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceFragmentDensityMap2FeaturesEXT & operator=( VkPhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2FeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMap2FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMap2FeaturesEXT & setFragmentDensityMapDeferred( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDeferred_ ) VULKAN_HPP_NOEXCEPT
{
fragmentDensityMapDeferred = fragmentDensityMapDeferred_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceFragmentDensityMap2FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceFragmentDensityMap2FeaturesEXT*>( this );
}
operator VkPhysicalDeviceFragmentDensityMap2FeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceFragmentDensityMap2FeaturesEXT*>( this );
}
operator VkPhysicalDeviceFragmentDensityMap2FeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceFragmentDensityMap2FeaturesEXT*>( this );
}
operator VkPhysicalDeviceFragmentDensityMap2FeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceFragmentDensityMap2FeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, fragmentDensityMapDeferred );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( fragmentDensityMapDeferred == rhs.fragmentDensityMapDeferred );
#endif
}
bool operator!=( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDeferred = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT>
{
using Type = PhysicalDeviceFragmentDensityMap2FeaturesEXT;
};
// wrapper struct for struct VkPhysicalDeviceFragmentDensityMap2PropertiesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFragmentDensityMap2PropertiesEXT.html
struct PhysicalDeviceFragmentDensityMap2PropertiesEXT
{
using NativeType = VkPhysicalDeviceFragmentDensityMap2PropertiesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMap2PropertiesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2PropertiesEXT(VULKAN_HPP_NAMESPACE::Bool32 subsampledLoads_ = {}, VULKAN_HPP_NAMESPACE::Bool32 subsampledCoarseReconstructionEarlyAccess_ = {}, uint32_t maxSubsampledArrayLayers_ = {}, uint32_t maxDescriptorSetSubsampledSamplers_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, subsampledLoads{ subsampledLoads_ }, subsampledCoarseReconstructionEarlyAccess{ subsampledCoarseReconstructionEarlyAccess_ }, maxSubsampledArrayLayers{ maxSubsampledArrayLayers_ }, maxDescriptorSetSubsampledSamplers{ maxDescriptorSetSubsampledSamplers_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2PropertiesEXT( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceFragmentDensityMap2PropertiesEXT( VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceFragmentDensityMap2PropertiesEXT( *reinterpret_cast<PhysicalDeviceFragmentDensityMap2PropertiesEXT const *>( &rhs ) )
{}
PhysicalDeviceFragmentDensityMap2PropertiesEXT & operator=( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceFragmentDensityMap2PropertiesEXT & operator=( VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2PropertiesEXT const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceFragmentDensityMap2PropertiesEXT*>( this );
}
operator VkPhysicalDeviceFragmentDensityMap2PropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceFragmentDensityMap2PropertiesEXT*>( this );
}
operator VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceFragmentDensityMap2PropertiesEXT*>( this );
}
operator VkPhysicalDeviceFragmentDensityMap2PropertiesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceFragmentDensityMap2PropertiesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, subsampledLoads, subsampledCoarseReconstructionEarlyAccess, maxSubsampledArrayLayers, maxDescriptorSetSubsampledSamplers );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( subsampledLoads == rhs.subsampledLoads )
&& ( subsampledCoarseReconstructionEarlyAccess == rhs.subsampledCoarseReconstructionEarlyAccess )
&& ( maxSubsampledArrayLayers == rhs.maxSubsampledArrayLayers )
&& ( maxDescriptorSetSubsampledSamplers == rhs.maxDescriptorSetSubsampledSamplers );
#endif
}
bool operator!=( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMap2PropertiesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 subsampledLoads = {};
VULKAN_HPP_NAMESPACE::Bool32 subsampledCoarseReconstructionEarlyAccess = {};
uint32_t maxSubsampledArrayLayers = {};
uint32_t maxDescriptorSetSubsampledSamplers = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentDensityMap2PropertiesEXT>
{
using Type = PhysicalDeviceFragmentDensityMap2PropertiesEXT;
};
// wrapper struct for struct VkPhysicalDeviceFragmentDensityMapFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFragmentDensityMapFeaturesEXT.html
struct PhysicalDeviceFragmentDensityMapFeaturesEXT
{
using NativeType = VkPhysicalDeviceFragmentDensityMapFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, fragmentDensityMap{ fragmentDensityMap_ }, fragmentDensityMapDynamic{ fragmentDensityMapDynamic_ }, fragmentDensityMapNonSubsampledImages{ fragmentDensityMapNonSubsampledImages_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapFeaturesEXT( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceFragmentDensityMapFeaturesEXT( VkPhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceFragmentDensityMapFeaturesEXT( *reinterpret_cast<PhysicalDeviceFragmentDensityMapFeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceFragmentDensityMapFeaturesEXT & operator=( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceFragmentDensityMapFeaturesEXT & operator=( VkPhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapFeaturesEXT & setFragmentDensityMap( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap_ ) VULKAN_HPP_NOEXCEPT
{
fragmentDensityMap = fragmentDensityMap_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapFeaturesEXT & setFragmentDensityMapDynamic( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic_ ) VULKAN_HPP_NOEXCEPT
{
fragmentDensityMapDynamic = fragmentDensityMapDynamic_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapFeaturesEXT & setFragmentDensityMapNonSubsampledImages( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages_ ) VULKAN_HPP_NOEXCEPT
{
fragmentDensityMapNonSubsampledImages = fragmentDensityMapNonSubsampledImages_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceFragmentDensityMapFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceFragmentDensityMapFeaturesEXT*>( this );
}
operator VkPhysicalDeviceFragmentDensityMapFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceFragmentDensityMapFeaturesEXT*>( this );
}
operator VkPhysicalDeviceFragmentDensityMapFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceFragmentDensityMapFeaturesEXT*>( this );
}
operator VkPhysicalDeviceFragmentDensityMapFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceFragmentDensityMapFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, fragmentDensityMap, fragmentDensityMapDynamic, fragmentDensityMapNonSubsampledImages );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceFragmentDensityMapFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( fragmentDensityMap == rhs.fragmentDensityMap )
&& ( fragmentDensityMapDynamic == rhs.fragmentDensityMapDynamic )
&& ( fragmentDensityMapNonSubsampledImages == rhs.fragmentDensityMapNonSubsampledImages );
#endif
}
bool operator!=( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap = {};
VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic = {};
VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT>
{
using Type = PhysicalDeviceFragmentDensityMapFeaturesEXT;
};
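  // Illustrative usage sketch (not part of the generated API): the chaining setters above allow the
  // struct to be configured fluently before it is linked into a DeviceCreateInfo pNext chain;
  // `deviceCreateInfo` is an assumed VULKAN_HPP_NAMESPACE::DeviceCreateInfo.
  //   auto fdmFeatures = VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT{}
  //                        .setFragmentDensityMap( VK_TRUE )
  //                        .setFragmentDensityMapDynamic( VK_TRUE );
  //   deviceCreateInfo.setPNext( &fdmFeatures );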
// wrapper struct for struct VkPhysicalDeviceFragmentDensityMapOffsetFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFragmentDensityMapOffsetFeaturesEXT.html
struct PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT
{
using NativeType = VkPhysicalDeviceFragmentDensityMapOffsetFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMapOffsetFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapOffset_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, fragmentDensityMapOffset{ fragmentDensityMapOffset_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT( PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT( VkPhysicalDeviceFragmentDensityMapOffsetFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT( *reinterpret_cast<PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT & operator=( PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT & operator=( VkPhysicalDeviceFragmentDensityMapOffsetFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT & setFragmentDensityMapOffset( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapOffset_ ) VULKAN_HPP_NOEXCEPT
{
fragmentDensityMapOffset = fragmentDensityMapOffset_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceFragmentDensityMapOffsetFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceFragmentDensityMapOffsetFeaturesEXT*>( this );
}
operator VkPhysicalDeviceFragmentDensityMapOffsetFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceFragmentDensityMapOffsetFeaturesEXT*>( this );
}
operator VkPhysicalDeviceFragmentDensityMapOffsetFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceFragmentDensityMapOffsetFeaturesEXT*>( this );
}
operator VkPhysicalDeviceFragmentDensityMapOffsetFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceFragmentDensityMapOffsetFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, fragmentDensityMapOffset );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( fragmentDensityMapOffset == rhs.fragmentDensityMapOffset );
#endif
}
bool operator!=( PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapOffsetFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapOffset = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentDensityMapOffsetFeaturesEXT>
{
using Type = PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT;
};
using PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM = PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT;
// wrapper struct for struct VkPhysicalDeviceFragmentDensityMapOffsetPropertiesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFragmentDensityMapOffsetPropertiesEXT.html
struct PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT
{
using NativeType = VkPhysicalDeviceFragmentDensityMapOffsetPropertiesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMapOffsetPropertiesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT(VULKAN_HPP_NAMESPACE::Extent2D fragmentDensityOffsetGranularity_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, fragmentDensityOffsetGranularity{ fragmentDensityOffsetGranularity_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT( PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT( VkPhysicalDeviceFragmentDensityMapOffsetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT( *reinterpret_cast<PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT const *>( &rhs ) )
{}
PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT & operator=( PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT & operator=( VkPhysicalDeviceFragmentDensityMapOffsetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceFragmentDensityMapOffsetPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceFragmentDensityMapOffsetPropertiesEXT*>( this );
}
operator VkPhysicalDeviceFragmentDensityMapOffsetPropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceFragmentDensityMapOffsetPropertiesEXT*>( this );
}
operator VkPhysicalDeviceFragmentDensityMapOffsetPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceFragmentDensityMapOffsetPropertiesEXT*>( this );
}
operator VkPhysicalDeviceFragmentDensityMapOffsetPropertiesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceFragmentDensityMapOffsetPropertiesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Extent2D const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, fragmentDensityOffsetGranularity );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( fragmentDensityOffsetGranularity == rhs.fragmentDensityOffsetGranularity );
#endif
}
bool operator!=( PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapOffsetPropertiesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Extent2D fragmentDensityOffsetGranularity = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentDensityMapOffsetPropertiesEXT>
{
using Type = PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT;
};
using PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM = PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT;
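  // Illustrative usage sketch (not part of the generated API): after a PhysicalDeviceProperties2
  // chain query, the reported granularity is a plain Extent2D whose members are read directly;
  // `offsetProperties` is an assumed, already-filled PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT.
  //   uint32_t offsetGranularityX = offsetProperties.fragmentDensityOffsetGranularity.width;
  //   uint32_t offsetGranularityY = offsetProperties.fragmentDensityOffsetGranularity.height;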
// wrapper struct for struct VkPhysicalDeviceFragmentDensityMapPropertiesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFragmentDensityMapPropertiesEXT.html
struct PhysicalDeviceFragmentDensityMapPropertiesEXT
{
using NativeType = VkPhysicalDeviceFragmentDensityMapPropertiesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapPropertiesEXT(VULKAN_HPP_NAMESPACE::Extent2D minFragmentDensityTexelSize_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxFragmentDensityTexelSize_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityInvocations_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, minFragmentDensityTexelSize{ minFragmentDensityTexelSize_ }, maxFragmentDensityTexelSize{ maxFragmentDensityTexelSize_ }, fragmentDensityInvocations{ fragmentDensityInvocations_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapPropertiesEXT( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceFragmentDensityMapPropertiesEXT( VkPhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceFragmentDensityMapPropertiesEXT( *reinterpret_cast<PhysicalDeviceFragmentDensityMapPropertiesEXT const *>( &rhs ) )
{}
PhysicalDeviceFragmentDensityMapPropertiesEXT & operator=( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceFragmentDensityMapPropertiesEXT & operator=( VkPhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceFragmentDensityMapPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceFragmentDensityMapPropertiesEXT*>( this );
}
operator VkPhysicalDeviceFragmentDensityMapPropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceFragmentDensityMapPropertiesEXT*>( this );
}
operator VkPhysicalDeviceFragmentDensityMapPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceFragmentDensityMapPropertiesEXT*>( this );
}
operator VkPhysicalDeviceFragmentDensityMapPropertiesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceFragmentDensityMapPropertiesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, minFragmentDensityTexelSize, maxFragmentDensityTexelSize, fragmentDensityInvocations );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceFragmentDensityMapPropertiesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( minFragmentDensityTexelSize == rhs.minFragmentDensityTexelSize )
&& ( maxFragmentDensityTexelSize == rhs.maxFragmentDensityTexelSize )
&& ( fragmentDensityInvocations == rhs.fragmentDensityInvocations );
#endif
}
bool operator!=( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Extent2D minFragmentDensityTexelSize = {};
VULKAN_HPP_NAMESPACE::Extent2D maxFragmentDensityTexelSize = {};
VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityInvocations = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT>
{
using Type = PhysicalDeviceFragmentDensityMapPropertiesEXT;
};
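  // Illustrative usage sketch (not part of the generated API): several property structs can be read
  // in a single variadic StructureChain query; `physicalDevice` is an assumed PhysicalDevice handle.
  //   auto chain = physicalDevice.getProperties2<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2,
  //                                              VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT,
  //                                              VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2PropertiesEXT>();
  //   auto const & fdmProps  = chain.get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT>();
  //   auto const & fdm2Props = chain.get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2PropertiesEXT>();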
// wrapper struct for struct VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR.html
struct PhysicalDeviceFragmentShaderBarycentricFeaturesKHR
{
using NativeType = VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderBarycentricFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, fragmentShaderBarycentric{ fragmentShaderBarycentric_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderBarycentricFeaturesKHR( PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceFragmentShaderBarycentricFeaturesKHR( VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceFragmentShaderBarycentricFeaturesKHR( *reinterpret_cast<PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const *>( &rhs ) )
{}
PhysicalDeviceFragmentShaderBarycentricFeaturesKHR & operator=( PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceFragmentShaderBarycentricFeaturesKHR & operator=( VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderBarycentricFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderBarycentricFeaturesKHR & setFragmentShaderBarycentric( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric_ ) VULKAN_HPP_NOEXCEPT
{
fragmentShaderBarycentric = fragmentShaderBarycentric_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR*>( this );
}
operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR*>( this );
}
operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR*>( this );
}
operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, fragmentShaderBarycentric );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & ) const = default;
#else
bool operator==( PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( fragmentShaderBarycentric == rhs.fragmentShaderBarycentric );
#endif
}
bool operator!=( PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesKHR>
{
using Type = PhysicalDeviceFragmentShaderBarycentricFeaturesKHR;
};
using PhysicalDeviceFragmentShaderBarycentricFeaturesNV = PhysicalDeviceFragmentShaderBarycentricFeaturesKHR;
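  // Illustrative usage sketch (not part of the generated API): the conversion operators above make the
  // wrapper interchangeable with its NativeType, so it can be handed to C entry points without a cast:
  //   VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesKHR barycentricFeatures;
  //   VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR & native = barycentricFeatures;  // zero-copy view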
// wrapper struct for struct VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR.html
struct PhysicalDeviceFragmentShaderBarycentricPropertiesKHR
{
using NativeType = VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShaderBarycentricPropertiesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderBarycentricPropertiesKHR(VULKAN_HPP_NAMESPACE::Bool32 triStripVertexOrderIndependentOfProvokingVertex_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, triStripVertexOrderIndependentOfProvokingVertex{ triStripVertexOrderIndependentOfProvokingVertex_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderBarycentricPropertiesKHR( PhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceFragmentShaderBarycentricPropertiesKHR( VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceFragmentShaderBarycentricPropertiesKHR( *reinterpret_cast<PhysicalDeviceFragmentShaderBarycentricPropertiesKHR const *>( &rhs ) )
{}
PhysicalDeviceFragmentShaderBarycentricPropertiesKHR & operator=( PhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceFragmentShaderBarycentricPropertiesKHR & operator=( VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricPropertiesKHR const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR*>( this );
}
operator VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR*>( this );
}
operator VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR*>( this );
}
operator VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, triStripVertexOrderIndependentOfProvokingVertex );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & ) const = default;
#else
bool operator==( PhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( triStripVertexOrderIndependentOfProvokingVertex == rhs.triStripVertexOrderIndependentOfProvokingVertex );
#endif
}
bool operator!=( PhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShaderBarycentricPropertiesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 triStripVertexOrderIndependentOfProvokingVertex = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShaderBarycentricPropertiesKHR>
{
using Type = PhysicalDeviceFragmentShaderBarycentricPropertiesKHR;
};
// wrapper struct for struct VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT.html
struct PhysicalDeviceFragmentShaderInterlockFeaturesEXT
{
using NativeType = VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderInterlockFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, fragmentShaderSampleInterlock{ fragmentShaderSampleInterlock_ }, fragmentShaderPixelInterlock{ fragmentShaderPixelInterlock_ }, fragmentShaderShadingRateInterlock{ fragmentShaderShadingRateInterlock_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderInterlockFeaturesEXT( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceFragmentShaderInterlockFeaturesEXT( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceFragmentShaderInterlockFeaturesEXT( *reinterpret_cast<PhysicalDeviceFragmentShaderInterlockFeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceFragmentShaderInterlockFeaturesEXT & operator=( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceFragmentShaderInterlockFeaturesEXT & operator=( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderInterlockFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setFragmentShaderSampleInterlock( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock_ ) VULKAN_HPP_NOEXCEPT
{
fragmentShaderSampleInterlock = fragmentShaderSampleInterlock_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setFragmentShaderPixelInterlock( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock_ ) VULKAN_HPP_NOEXCEPT
{
fragmentShaderPixelInterlock = fragmentShaderPixelInterlock_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setFragmentShaderShadingRateInterlock( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock_ ) VULKAN_HPP_NOEXCEPT
{
fragmentShaderShadingRateInterlock = fragmentShaderShadingRateInterlock_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT*>( this );
}
operator VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT*>( this );
}
operator VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT*>( this );
}
operator VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, fragmentShaderSampleInterlock, fragmentShaderPixelInterlock, fragmentShaderShadingRateInterlock );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( fragmentShaderSampleInterlock == rhs.fragmentShaderSampleInterlock )
&& ( fragmentShaderPixelInterlock == rhs.fragmentShaderPixelInterlock )
&& ( fragmentShaderShadingRateInterlock == rhs.fragmentShaderShadingRateInterlock );
#endif
}
bool operator!=( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock = {};
VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock = {};
VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT>
{
using Type = PhysicalDeviceFragmentShaderInterlockFeaturesEXT;
};
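  // Illustrative usage sketch (not part of the generated API): operator== compares every member,
  // including sType and pNext, so two filled copies of this struct compare equal only when their
  // pNext pointers match as well; `interlockFeaturesA` and `interlockFeaturesB` are assumed queries.
  //   bool sameInterlockSupport = ( interlockFeaturesA == interlockFeaturesB );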
// wrapper struct for struct VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV.html
struct PhysicalDeviceFragmentShadingRateEnumsFeaturesNV
{
using NativeType = VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateEnumsFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateEnums_ = {}, VULKAN_HPP_NAMESPACE::Bool32 supersampleFragmentShadingRates_ = {}, VULKAN_HPP_NAMESPACE::Bool32 noInvocationFragmentShadingRates_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, fragmentShadingRateEnums{ fragmentShadingRateEnums_ }, supersampleFragmentShadingRates{ supersampleFragmentShadingRates_ }, noInvocationFragmentShadingRates{ noInvocationFragmentShadingRates_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateEnumsFeaturesNV( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceFragmentShadingRateEnumsFeaturesNV( VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceFragmentShadingRateEnumsFeaturesNV( *reinterpret_cast<PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const *>( &rhs ) )
{}
PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & operator=( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & operator=( VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & setFragmentShadingRateEnums( VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateEnums_ ) VULKAN_HPP_NOEXCEPT
{
fragmentShadingRateEnums = fragmentShadingRateEnums_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & setSupersampleFragmentShadingRates( VULKAN_HPP_NAMESPACE::Bool32 supersampleFragmentShadingRates_ ) VULKAN_HPP_NOEXCEPT
{
supersampleFragmentShadingRates = supersampleFragmentShadingRates_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & setNoInvocationFragmentShadingRates( VULKAN_HPP_NAMESPACE::Bool32 noInvocationFragmentShadingRates_ ) VULKAN_HPP_NOEXCEPT
{
noInvocationFragmentShadingRates = noInvocationFragmentShadingRates_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV*>( this );
}
operator VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV*>( this );
}
operator VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV*>( this );
}
operator VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, fragmentShadingRateEnums, supersampleFragmentShadingRates, noInvocationFragmentShadingRates );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & ) const = default;
#else
bool operator==( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( fragmentShadingRateEnums == rhs.fragmentShadingRateEnums )
&& ( supersampleFragmentShadingRates == rhs.supersampleFragmentShadingRates )
&& ( noInvocationFragmentShadingRates == rhs.noInvocationFragmentShadingRates );
#endif
}
bool operator!=( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateEnums = {};
VULKAN_HPP_NAMESPACE::Bool32 supersampleFragmentShadingRates = {};
VULKAN_HPP_NAMESPACE::Bool32 noInvocationFragmentShadingRates = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV>
{
using Type = PhysicalDeviceFragmentShadingRateEnumsFeaturesNV;
};
// wrapper struct for struct VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV.html
struct PhysicalDeviceFragmentShadingRateEnumsPropertiesNV
{
using NativeType = VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateEnumsPropertiesNV(VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateInvocationCount_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, maxFragmentShadingRateInvocationCount{ maxFragmentShadingRateInvocationCount_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateEnumsPropertiesNV( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceFragmentShadingRateEnumsPropertiesNV( VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceFragmentShadingRateEnumsPropertiesNV( *reinterpret_cast<PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const *>( &rhs ) )
{}
PhysicalDeviceFragmentShadingRateEnumsPropertiesNV & operator=( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceFragmentShadingRateEnumsPropertiesNV & operator=( VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV*>( this );
}
operator VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV*>( this );
}
operator VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV*>( this );
}
operator VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::SampleCountFlagBits const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, maxFragmentShadingRateInvocationCount );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & ) const = default;
#else
bool operator==( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( maxFragmentShadingRateInvocationCount == rhs.maxFragmentShadingRateInvocationCount );
#endif
}
bool operator!=( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateInvocationCount = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV>
{
using Type = PhysicalDeviceFragmentShadingRateEnumsPropertiesNV;
};
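  // Illustrative usage sketch (not part of the generated API): maxFragmentShadingRateInvocationCount is
  // a single SampleCountFlagBits value, so it is compared against an enumerator rather than tested as a
  // bitmask; `shadingRateEnumsProperties` is an assumed, already-filled instance of the struct above.
  //   bool atLeast4Invocations = ( shadingRateEnumsProperties.maxFragmentShadingRateInvocationCount
  //                                >= VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e4 );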
// wrapper struct for struct VkPhysicalDeviceFragmentShadingRateFeaturesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFragmentShadingRateFeaturesKHR.html
struct PhysicalDeviceFragmentShadingRateFeaturesKHR
{
using NativeType = VkPhysicalDeviceFragmentShadingRateFeaturesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShadingRateFeaturesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 pipelineFragmentShadingRate_ = {}, VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRate_ = {}, VULKAN_HPP_NAMESPACE::Bool32 attachmentFragmentShadingRate_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pipelineFragmentShadingRate{ pipelineFragmentShadingRate_ }, primitiveFragmentShadingRate{ primitiveFragmentShadingRate_ }, attachmentFragmentShadingRate{ attachmentFragmentShadingRate_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateFeaturesKHR( PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceFragmentShadingRateFeaturesKHR( VkPhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceFragmentShadingRateFeaturesKHR( *reinterpret_cast<PhysicalDeviceFragmentShadingRateFeaturesKHR const *>( &rhs ) )
{}
PhysicalDeviceFragmentShadingRateFeaturesKHR & operator=( PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceFragmentShadingRateFeaturesKHR & operator=( VkPhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateFeaturesKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateFeaturesKHR & setPipelineFragmentShadingRate( VULKAN_HPP_NAMESPACE::Bool32 pipelineFragmentShadingRate_ ) VULKAN_HPP_NOEXCEPT
{
pipelineFragmentShadingRate = pipelineFragmentShadingRate_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateFeaturesKHR & setPrimitiveFragmentShadingRate( VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRate_ ) VULKAN_HPP_NOEXCEPT
{
primitiveFragmentShadingRate = primitiveFragmentShadingRate_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateFeaturesKHR & setAttachmentFragmentShadingRate( VULKAN_HPP_NAMESPACE::Bool32 attachmentFragmentShadingRate_ ) VULKAN_HPP_NOEXCEPT
{
attachmentFragmentShadingRate = attachmentFragmentShadingRate_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceFragmentShadingRateFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceFragmentShadingRateFeaturesKHR*>( this );
}
operator VkPhysicalDeviceFragmentShadingRateFeaturesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceFragmentShadingRateFeaturesKHR*>( this );
}
operator VkPhysicalDeviceFragmentShadingRateFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceFragmentShadingRateFeaturesKHR*>( this );
}
operator VkPhysicalDeviceFragmentShadingRateFeaturesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceFragmentShadingRateFeaturesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pipelineFragmentShadingRate, primitiveFragmentShadingRate, attachmentFragmentShadingRate );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceFragmentShadingRateFeaturesKHR const & ) const = default;
#else
bool operator==( PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pipelineFragmentShadingRate == rhs.pipelineFragmentShadingRate )
&& ( primitiveFragmentShadingRate == rhs.primitiveFragmentShadingRate )
&& ( attachmentFragmentShadingRate == rhs.attachmentFragmentShadingRate );
#endif
}
bool operator!=( PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateFeaturesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 pipelineFragmentShadingRate = {};
VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRate = {};
VULKAN_HPP_NAMESPACE::Bool32 attachmentFragmentShadingRate = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShadingRateFeaturesKHR>
{
using Type = PhysicalDeviceFragmentShadingRateFeaturesKHR;
};
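  // Usage sketch (illustrative): feature structs like this one are first
  // queried through vk::PhysicalDeviceFeatures2 and can then be chained,
  // unchanged, into vk::DeviceCreateInfo::pNext to enable the reported
  // features. `physicalDevice` is an assumed, valid vk::PhysicalDevice.
  //
  //   vk::PhysicalDeviceFragmentShadingRateFeaturesKHR fsrFeatures{};
  //   vk::PhysicalDeviceFeatures2 features2{};
  //   features2.setPNext( &fsrFeatures );
  //   physicalDevice.getFeatures2( &features2 );
  //   if ( fsrFeatures.attachmentFragmentShadingRate )
  //   {
  //     // chain &fsrFeatures (or all of &features2) into vk::DeviceCreateInfo::pNext
  //   }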
// wrapper struct for struct VkPhysicalDeviceFragmentShadingRateKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFragmentShadingRateKHR.html
struct PhysicalDeviceFragmentShadingRateKHR
{
using NativeType = VkPhysicalDeviceFragmentShadingRateKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShadingRateKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateKHR(VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts_ = {}, VULKAN_HPP_NAMESPACE::Extent2D fragmentSize_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, sampleCounts{ sampleCounts_ }, fragmentSize{ fragmentSize_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateKHR( PhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceFragmentShadingRateKHR( VkPhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceFragmentShadingRateKHR( *reinterpret_cast<PhysicalDeviceFragmentShadingRateKHR const *>( &rhs ) )
{}
PhysicalDeviceFragmentShadingRateKHR & operator=( PhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceFragmentShadingRateKHR & operator=( VkPhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceFragmentShadingRateKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceFragmentShadingRateKHR*>( this );
}
operator VkPhysicalDeviceFragmentShadingRateKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR*>( this );
}
operator VkPhysicalDeviceFragmentShadingRateKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceFragmentShadingRateKHR*>( this );
}
operator VkPhysicalDeviceFragmentShadingRateKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::SampleCountFlags const &, VULKAN_HPP_NAMESPACE::Extent2D const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, sampleCounts, fragmentSize );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceFragmentShadingRateKHR const & ) const = default;
#else
bool operator==( PhysicalDeviceFragmentShadingRateKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( sampleCounts == rhs.sampleCounts )
&& ( fragmentSize == rhs.fragmentSize );
#endif
}
bool operator!=( PhysicalDeviceFragmentShadingRateKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts = {};
VULKAN_HPP_NAMESPACE::Extent2D fragmentSize = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShadingRateKHR>
{
using Type = PhysicalDeviceFragmentShadingRateKHR;
};
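  // Usage sketch (illustrative): unlike most PhysicalDevice* structs, this one
  // is returned as an array — one entry per supported rate — by
  // vk::PhysicalDevice::getFragmentShadingRatesKHR. Assumes
  // VK_KHR_fragment_shading_rate is enabled and the dispatcher in use has the
  // extension's entry points loaded.
  //
  //   std::vector<vk::PhysicalDeviceFragmentShadingRateKHR> rates =
  //     physicalDevice.getFragmentShadingRatesKHR();
  //   for ( auto const & rate : rates )
  //   {
  //     // rate.fragmentSize is e.g. {2,2}; rate.sampleCounts lists the sample
  //     // counts at which that fragment size may be used.
  //   }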
// wrapper struct for struct VkPhysicalDeviceFragmentShadingRatePropertiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFragmentShadingRatePropertiesKHR.html
struct PhysicalDeviceFragmentShadingRatePropertiesKHR
{
using NativeType = VkPhysicalDeviceFragmentShadingRatePropertiesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShadingRatePropertiesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRatePropertiesKHR(VULKAN_HPP_NAMESPACE::Extent2D minFragmentShadingRateAttachmentTexelSize_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxFragmentShadingRateAttachmentTexelSize_ = {}, uint32_t maxFragmentShadingRateAttachmentTexelSizeAspectRatio_ = {}, VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRateWithMultipleViewports_ = {}, VULKAN_HPP_NAMESPACE::Bool32 layeredShadingRateAttachments_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateNonTrivialCombinerOps_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxFragmentSize_ = {}, uint32_t maxFragmentSizeAspectRatio_ = {}, uint32_t maxFragmentShadingRateCoverageSamples_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateRasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderDepthStencilWrites_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithSampleMask_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderSampleMask_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithConservativeRasterization_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithFragmentShaderInterlock_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithCustomSampleLocations_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateStrictMultiplyCombiner_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, minFragmentShadingRateAttachmentTexelSize{ minFragmentShadingRateAttachmentTexelSize_ }, maxFragmentShadingRateAttachmentTexelSize{ maxFragmentShadingRateAttachmentTexelSize_ }, maxFragmentShadingRateAttachmentTexelSizeAspectRatio{ maxFragmentShadingRateAttachmentTexelSizeAspectRatio_ }, primitiveFragmentShadingRateWithMultipleViewports{ primitiveFragmentShadingRateWithMultipleViewports_ }, layeredShadingRateAttachments{ layeredShadingRateAttachments_ }, fragmentShadingRateNonTrivialCombinerOps{ fragmentShadingRateNonTrivialCombinerOps_ }, maxFragmentSize{ maxFragmentSize_ }, maxFragmentSizeAspectRatio{ maxFragmentSizeAspectRatio_ }, maxFragmentShadingRateCoverageSamples{ maxFragmentShadingRateCoverageSamples_ }, maxFragmentShadingRateRasterizationSamples{ maxFragmentShadingRateRasterizationSamples_ }, fragmentShadingRateWithShaderDepthStencilWrites{ fragmentShadingRateWithShaderDepthStencilWrites_ }, fragmentShadingRateWithSampleMask{ fragmentShadingRateWithSampleMask_ }, fragmentShadingRateWithShaderSampleMask{ fragmentShadingRateWithShaderSampleMask_ }, fragmentShadingRateWithConservativeRasterization{ fragmentShadingRateWithConservativeRasterization_ }, fragmentShadingRateWithFragmentShaderInterlock{ fragmentShadingRateWithFragmentShaderInterlock_ }, fragmentShadingRateWithCustomSampleLocations{ fragmentShadingRateWithCustomSampleLocations_ }, fragmentShadingRateStrictMultiplyCombiner{ fragmentShadingRateStrictMultiplyCombiner_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRatePropertiesKHR( PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceFragmentShadingRatePropertiesKHR( VkPhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceFragmentShadingRatePropertiesKHR( *reinterpret_cast<PhysicalDeviceFragmentShadingRatePropertiesKHR const *>( &rhs ) )
{}
PhysicalDeviceFragmentShadingRatePropertiesKHR & operator=( PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceFragmentShadingRatePropertiesKHR & operator=( VkPhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRatePropertiesKHR const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceFragmentShadingRatePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceFragmentShadingRatePropertiesKHR*>( this );
}
operator VkPhysicalDeviceFragmentShadingRatePropertiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceFragmentShadingRatePropertiesKHR*>( this );
}
operator VkPhysicalDeviceFragmentShadingRatePropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceFragmentShadingRatePropertiesKHR*>( this );
}
operator VkPhysicalDeviceFragmentShadingRatePropertiesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceFragmentShadingRatePropertiesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Extent2D const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::SampleCountFlagBits const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, minFragmentShadingRateAttachmentTexelSize, maxFragmentShadingRateAttachmentTexelSize, maxFragmentShadingRateAttachmentTexelSizeAspectRatio, primitiveFragmentShadingRateWithMultipleViewports, layeredShadingRateAttachments, fragmentShadingRateNonTrivialCombinerOps, maxFragmentSize, maxFragmentSizeAspectRatio, maxFragmentShadingRateCoverageSamples, maxFragmentShadingRateRasterizationSamples, fragmentShadingRateWithShaderDepthStencilWrites, fragmentShadingRateWithSampleMask, fragmentShadingRateWithShaderSampleMask, fragmentShadingRateWithConservativeRasterization, fragmentShadingRateWithFragmentShaderInterlock, fragmentShadingRateWithCustomSampleLocations, fragmentShadingRateStrictMultiplyCombiner );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceFragmentShadingRatePropertiesKHR const & ) const = default;
#else
bool operator==( PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( minFragmentShadingRateAttachmentTexelSize == rhs.minFragmentShadingRateAttachmentTexelSize )
&& ( maxFragmentShadingRateAttachmentTexelSize == rhs.maxFragmentShadingRateAttachmentTexelSize )
&& ( maxFragmentShadingRateAttachmentTexelSizeAspectRatio == rhs.maxFragmentShadingRateAttachmentTexelSizeAspectRatio )
&& ( primitiveFragmentShadingRateWithMultipleViewports == rhs.primitiveFragmentShadingRateWithMultipleViewports )
&& ( layeredShadingRateAttachments == rhs.layeredShadingRateAttachments )
&& ( fragmentShadingRateNonTrivialCombinerOps == rhs.fragmentShadingRateNonTrivialCombinerOps )
&& ( maxFragmentSize == rhs.maxFragmentSize )
&& ( maxFragmentSizeAspectRatio == rhs.maxFragmentSizeAspectRatio )
&& ( maxFragmentShadingRateCoverageSamples == rhs.maxFragmentShadingRateCoverageSamples )
&& ( maxFragmentShadingRateRasterizationSamples == rhs.maxFragmentShadingRateRasterizationSamples )
&& ( fragmentShadingRateWithShaderDepthStencilWrites == rhs.fragmentShadingRateWithShaderDepthStencilWrites )
&& ( fragmentShadingRateWithSampleMask == rhs.fragmentShadingRateWithSampleMask )
&& ( fragmentShadingRateWithShaderSampleMask == rhs.fragmentShadingRateWithShaderSampleMask )
&& ( fragmentShadingRateWithConservativeRasterization == rhs.fragmentShadingRateWithConservativeRasterization )
&& ( fragmentShadingRateWithFragmentShaderInterlock == rhs.fragmentShadingRateWithFragmentShaderInterlock )
&& ( fragmentShadingRateWithCustomSampleLocations == rhs.fragmentShadingRateWithCustomSampleLocations )
&& ( fragmentShadingRateStrictMultiplyCombiner == rhs.fragmentShadingRateStrictMultiplyCombiner );
#endif
}
bool operator!=( PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRatePropertiesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Extent2D minFragmentShadingRateAttachmentTexelSize = {};
VULKAN_HPP_NAMESPACE::Extent2D maxFragmentShadingRateAttachmentTexelSize = {};
uint32_t maxFragmentShadingRateAttachmentTexelSizeAspectRatio = {};
VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRateWithMultipleViewports = {};
VULKAN_HPP_NAMESPACE::Bool32 layeredShadingRateAttachments = {};
VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateNonTrivialCombinerOps = {};
VULKAN_HPP_NAMESPACE::Extent2D maxFragmentSize = {};
uint32_t maxFragmentSizeAspectRatio = {};
uint32_t maxFragmentShadingRateCoverageSamples = {};
VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateRasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderDepthStencilWrites = {};
VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithSampleMask = {};
VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderSampleMask = {};
VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithConservativeRasterization = {};
VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithFragmentShaderInterlock = {};
VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithCustomSampleLocations = {};
VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateStrictMultiplyCombiner = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShadingRatePropertiesKHR>
{
using Type = PhysicalDeviceFragmentShadingRatePropertiesKHR;
};
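  // Usage sketch (illustrative): the StructureChain helper is a convenient way
  // to pull these limits without manual pNext wiring. `physicalDevice` is an
  // assumed, valid vk::PhysicalDevice.
  //
  //   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
  //                                              vk::PhysicalDeviceFragmentShadingRatePropertiesKHR>();
  //   auto const & fsrProps = chain.get<vk::PhysicalDeviceFragmentShadingRatePropertiesKHR>();
  //   vk::Extent2D maxFragment = fsrProps.maxFragmentSize;  // e.g. {4,4}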
// wrapper struct for struct VkPhysicalDeviceFrameBoundaryFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFrameBoundaryFeaturesEXT.html
struct PhysicalDeviceFrameBoundaryFeaturesEXT
{
using NativeType = VkPhysicalDeviceFrameBoundaryFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFrameBoundaryFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceFrameBoundaryFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 frameBoundary_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, frameBoundary{ frameBoundary_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceFrameBoundaryFeaturesEXT( PhysicalDeviceFrameBoundaryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceFrameBoundaryFeaturesEXT( VkPhysicalDeviceFrameBoundaryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceFrameBoundaryFeaturesEXT( *reinterpret_cast<PhysicalDeviceFrameBoundaryFeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceFrameBoundaryFeaturesEXT & operator=( PhysicalDeviceFrameBoundaryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceFrameBoundaryFeaturesEXT & operator=( VkPhysicalDeviceFrameBoundaryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFrameBoundaryFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFrameBoundaryFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFrameBoundaryFeaturesEXT & setFrameBoundary( VULKAN_HPP_NAMESPACE::Bool32 frameBoundary_ ) VULKAN_HPP_NOEXCEPT
{
frameBoundary = frameBoundary_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceFrameBoundaryFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceFrameBoundaryFeaturesEXT*>( this );
}
operator VkPhysicalDeviceFrameBoundaryFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceFrameBoundaryFeaturesEXT*>( this );
}
operator VkPhysicalDeviceFrameBoundaryFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceFrameBoundaryFeaturesEXT*>( this );
}
operator VkPhysicalDeviceFrameBoundaryFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceFrameBoundaryFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, frameBoundary );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceFrameBoundaryFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceFrameBoundaryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( frameBoundary == rhs.frameBoundary );
#endif
}
bool operator!=( PhysicalDeviceFrameBoundaryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFrameBoundaryFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 frameBoundary = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceFrameBoundaryFeaturesEXT>
{
using Type = PhysicalDeviceFrameBoundaryFeaturesEXT;
};
// wrapper struct for struct VkPhysicalDeviceGlobalPriorityQueryFeatures, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceGlobalPriorityQueryFeatures.html
struct PhysicalDeviceGlobalPriorityQueryFeatures
{
using NativeType = VkPhysicalDeviceGlobalPriorityQueryFeatures;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceGlobalPriorityQueryFeatures;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceGlobalPriorityQueryFeatures(VULKAN_HPP_NAMESPACE::Bool32 globalPriorityQuery_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, globalPriorityQuery{ globalPriorityQuery_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceGlobalPriorityQueryFeatures( PhysicalDeviceGlobalPriorityQueryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceGlobalPriorityQueryFeatures( VkPhysicalDeviceGlobalPriorityQueryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceGlobalPriorityQueryFeatures( *reinterpret_cast<PhysicalDeviceGlobalPriorityQueryFeatures const *>( &rhs ) )
{}
PhysicalDeviceGlobalPriorityQueryFeatures & operator=( PhysicalDeviceGlobalPriorityQueryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceGlobalPriorityQueryFeatures & operator=( VkPhysicalDeviceGlobalPriorityQueryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceGlobalPriorityQueryFeatures const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGlobalPriorityQueryFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGlobalPriorityQueryFeatures & setGlobalPriorityQuery( VULKAN_HPP_NAMESPACE::Bool32 globalPriorityQuery_ ) VULKAN_HPP_NOEXCEPT
{
globalPriorityQuery = globalPriorityQuery_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceGlobalPriorityQueryFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceGlobalPriorityQueryFeatures*>( this );
}
operator VkPhysicalDeviceGlobalPriorityQueryFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceGlobalPriorityQueryFeatures*>( this );
}
operator VkPhysicalDeviceGlobalPriorityQueryFeatures const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceGlobalPriorityQueryFeatures*>( this );
}
operator VkPhysicalDeviceGlobalPriorityQueryFeatures *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceGlobalPriorityQueryFeatures*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, globalPriorityQuery );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceGlobalPriorityQueryFeatures const & ) const = default;
#else
bool operator==( PhysicalDeviceGlobalPriorityQueryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( globalPriorityQuery == rhs.globalPriorityQuery );
#endif
}
bool operator!=( PhysicalDeviceGlobalPriorityQueryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceGlobalPriorityQueryFeatures;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 globalPriorityQuery = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceGlobalPriorityQueryFeatures>
{
using Type = PhysicalDeviceGlobalPriorityQueryFeatures;
};
using PhysicalDeviceGlobalPriorityQueryFeaturesEXT = PhysicalDeviceGlobalPriorityQueryFeatures;
using PhysicalDeviceGlobalPriorityQueryFeaturesKHR = PhysicalDeviceGlobalPriorityQueryFeatures;
// wrapper struct for struct VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT.html
struct PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT
{
using NativeType = VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceGraphicsPipelineLibraryFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibrary_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, graphicsPipelineLibrary{ graphicsPipelineLibrary_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT( PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT( VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT( *reinterpret_cast<PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT & operator=( PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT & operator=( VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT & setGraphicsPipelineLibrary( VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibrary_ ) VULKAN_HPP_NOEXCEPT
{
graphicsPipelineLibrary = graphicsPipelineLibrary_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT*>( this );
}
operator VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT*>( this );
}
operator VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT*>( this );
}
operator VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, graphicsPipelineLibrary );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( graphicsPipelineLibrary == rhs.graphicsPipelineLibrary );
#endif
}
bool operator!=( PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceGraphicsPipelineLibraryFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibrary = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceGraphicsPipelineLibraryFeaturesEXT>
{
using Type = PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT;
};
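  // Usage sketch (illustrative): the chaining setters allow builder-style
  // initialization before device creation. All names below are assumed; queue
  // and extension setup is elided.
  //
  //   auto gplFeatures =
  //     vk::PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT{}.setGraphicsPipelineLibrary( VK_TRUE );
  //   vk::DeviceCreateInfo createInfo{};
  //   createInfo.setPNext( &gplFeatures );
  //   // ... then physicalDevice.createDevice( createInfo )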
// wrapper struct for struct VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT.html
struct PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT
{
using NativeType = VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceGraphicsPipelineLibraryPropertiesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT(VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibraryFastLinking_ = {}, VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibraryIndependentInterpolationDecoration_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, graphicsPipelineLibraryFastLinking{ graphicsPipelineLibraryFastLinking_ }, graphicsPipelineLibraryIndependentInterpolationDecoration{ graphicsPipelineLibraryIndependentInterpolationDecoration_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT( PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT( VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT( *reinterpret_cast<PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const *>( &rhs ) )
{}
PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT & operator=( PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT & operator=( VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT*>( this );
}
operator VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT*>( this );
}
operator VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT*>( this );
}
operator VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, graphicsPipelineLibraryFastLinking, graphicsPipelineLibraryIndependentInterpolationDecoration );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( graphicsPipelineLibraryFastLinking == rhs.graphicsPipelineLibraryFastLinking )
&& ( graphicsPipelineLibraryIndependentInterpolationDecoration == rhs.graphicsPipelineLibraryIndependentInterpolationDecoration );
#endif
}
bool operator!=( PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceGraphicsPipelineLibraryPropertiesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibraryFastLinking = {};
VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibraryIndependentInterpolationDecoration = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceGraphicsPipelineLibraryPropertiesEXT>
{
using Type = PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT;
};
// wrapper struct for struct VkPhysicalDeviceGroupProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceGroupProperties.html
struct PhysicalDeviceGroupProperties
{
using NativeType = VkPhysicalDeviceGroupProperties;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceGroupProperties;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGroupProperties(uint32_t physicalDeviceCount_ = {}, std::array<VULKAN_HPP_NAMESPACE::PhysicalDevice,VK_MAX_DEVICE_GROUP_SIZE> const & physicalDevices_ = {}, VULKAN_HPP_NAMESPACE::Bool32 subsetAllocation_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, physicalDeviceCount{ physicalDeviceCount_ }, physicalDevices{ physicalDevices_ }, subsetAllocation{ subsetAllocation_ }
{}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGroupProperties( PhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceGroupProperties( VkPhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceGroupProperties( *reinterpret_cast<PhysicalDeviceGroupProperties const *>( &rhs ) )
{}
PhysicalDeviceGroupProperties & operator=( PhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceGroupProperties & operator=( VkPhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceGroupProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceGroupProperties*>( this );
}
operator VkPhysicalDeviceGroupProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceGroupProperties*>( this );
}
operator VkPhysicalDeviceGroupProperties const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceGroupProperties*>( this );
}
operator VkPhysicalDeviceGroupProperties *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceGroupProperties*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::PhysicalDevice, VK_MAX_DEVICE_GROUP_SIZE> const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, physicalDeviceCount, physicalDevices, subsetAllocation );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceGroupProperties const & ) const = default;
#else
    bool operator==( PhysicalDeviceGroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( physicalDeviceCount == rhs.physicalDeviceCount )
          && ( physicalDevices == rhs.physicalDevices )
          && ( subsetAllocation == rhs.subsetAllocation );
#endif
    }
    bool operator!=( PhysicalDeviceGroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceGroupProperties;
void * pNext = {};
uint32_t physicalDeviceCount = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::PhysicalDevice, VK_MAX_DEVICE_GROUP_SIZE> physicalDevices = {};
VULKAN_HPP_NAMESPACE::Bool32 subsetAllocation = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceGroupProperties>
{
using Type = PhysicalDeviceGroupProperties;
};
using PhysicalDeviceGroupPropertiesKHR = PhysicalDeviceGroupProperties;
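  // Usage sketch (illustrative): instances of this struct are produced by
  // vk::Instance::enumeratePhysicalDeviceGroups (core since Vulkan 1.1).
  // `instance` is an assumed, valid vk::Instance.
  //
  //   std::vector<vk::PhysicalDeviceGroupProperties> groups = instance.enumeratePhysicalDeviceGroups();
  //   for ( auto const & group : groups )
  //   {
  //     // group.physicalDevices[0] .. group.physicalDevices[group.physicalDeviceCount - 1]
  //     // are the group members; subsetAllocation reports whether allocations
  //     // over a subset of the group are supported.
  //   }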
// wrapper struct for struct VkPhysicalDeviceHdrVividFeaturesHUAWEI, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceHdrVividFeaturesHUAWEI.html
struct PhysicalDeviceHdrVividFeaturesHUAWEI
{
using NativeType = VkPhysicalDeviceHdrVividFeaturesHUAWEI;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceHdrVividFeaturesHUAWEI;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceHdrVividFeaturesHUAWEI(VULKAN_HPP_NAMESPACE::Bool32 hdrVivid_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, hdrVivid{ hdrVivid_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceHdrVividFeaturesHUAWEI( PhysicalDeviceHdrVividFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceHdrVividFeaturesHUAWEI( VkPhysicalDeviceHdrVividFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceHdrVividFeaturesHUAWEI( *reinterpret_cast<PhysicalDeviceHdrVividFeaturesHUAWEI const *>( &rhs ) )
{}
PhysicalDeviceHdrVividFeaturesHUAWEI & operator=( PhysicalDeviceHdrVividFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceHdrVividFeaturesHUAWEI & operator=( VkPhysicalDeviceHdrVividFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceHdrVividFeaturesHUAWEI const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHdrVividFeaturesHUAWEI & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHdrVividFeaturesHUAWEI & setHdrVivid( VULKAN_HPP_NAMESPACE::Bool32 hdrVivid_ ) VULKAN_HPP_NOEXCEPT
{
hdrVivid = hdrVivid_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceHdrVividFeaturesHUAWEI const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceHdrVividFeaturesHUAWEI*>( this );
}
operator VkPhysicalDeviceHdrVividFeaturesHUAWEI &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceHdrVividFeaturesHUAWEI*>( this );
}
operator VkPhysicalDeviceHdrVividFeaturesHUAWEI const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceHdrVividFeaturesHUAWEI*>( this );
}
operator VkPhysicalDeviceHdrVividFeaturesHUAWEI *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceHdrVividFeaturesHUAWEI*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, hdrVivid );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceHdrVividFeaturesHUAWEI const & ) const = default;
#else
bool operator==( PhysicalDeviceHdrVividFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( hdrVivid == rhs.hdrVivid );
#endif
}
bool operator!=( PhysicalDeviceHdrVividFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceHdrVividFeaturesHUAWEI;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 hdrVivid = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceHdrVividFeaturesHUAWEI>
{
using Type = PhysicalDeviceHdrVividFeaturesHUAWEI;
};
// wrapper struct for struct VkPhysicalDeviceHostImageCopyFeatures, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceHostImageCopyFeatures.html
struct PhysicalDeviceHostImageCopyFeatures
{
using NativeType = VkPhysicalDeviceHostImageCopyFeatures;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceHostImageCopyFeatures;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceHostImageCopyFeatures(VULKAN_HPP_NAMESPACE::Bool32 hostImageCopy_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, hostImageCopy{ hostImageCopy_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceHostImageCopyFeatures( PhysicalDeviceHostImageCopyFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceHostImageCopyFeatures( VkPhysicalDeviceHostImageCopyFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceHostImageCopyFeatures( *reinterpret_cast<PhysicalDeviceHostImageCopyFeatures const *>( &rhs ) )
{}
PhysicalDeviceHostImageCopyFeatures & operator=( PhysicalDeviceHostImageCopyFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceHostImageCopyFeatures & operator=( VkPhysicalDeviceHostImageCopyFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyFeatures const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyFeatures & setHostImageCopy( VULKAN_HPP_NAMESPACE::Bool32 hostImageCopy_ ) VULKAN_HPP_NOEXCEPT
{
hostImageCopy = hostImageCopy_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceHostImageCopyFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceHostImageCopyFeatures*>( this );
}
operator VkPhysicalDeviceHostImageCopyFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceHostImageCopyFeatures*>( this );
}
operator VkPhysicalDeviceHostImageCopyFeatures const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceHostImageCopyFeatures*>( this );
}
operator VkPhysicalDeviceHostImageCopyFeatures *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceHostImageCopyFeatures*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, hostImageCopy );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceHostImageCopyFeatures const & ) const = default;
#else
bool operator==( PhysicalDeviceHostImageCopyFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( hostImageCopy == rhs.hostImageCopy );
#endif
}
bool operator!=( PhysicalDeviceHostImageCopyFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceHostImageCopyFeatures;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 hostImageCopy = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceHostImageCopyFeatures>
{
using Type = PhysicalDeviceHostImageCopyFeatures;
};
using PhysicalDeviceHostImageCopyFeaturesEXT = PhysicalDeviceHostImageCopyFeatures;
// wrapper struct for struct VkPhysicalDeviceHostImageCopyProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceHostImageCopyProperties.html
struct PhysicalDeviceHostImageCopyProperties
{
using NativeType = VkPhysicalDeviceHostImageCopyProperties;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceHostImageCopyProperties;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyProperties(uint32_t copySrcLayoutCount_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout * pCopySrcLayouts_ = {}, uint32_t copyDstLayoutCount_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout * pCopyDstLayouts_ = {}, std::array<uint8_t,VK_UUID_SIZE> const & optimalTilingLayoutUUID_ = {}, VULKAN_HPP_NAMESPACE::Bool32 identicalMemoryTypeRequirements_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, copySrcLayoutCount{ copySrcLayoutCount_ }, pCopySrcLayouts{ pCopySrcLayouts_ }, copyDstLayoutCount{ copyDstLayoutCount_ }, pCopyDstLayouts{ pCopyDstLayouts_ }, optimalTilingLayoutUUID{ optimalTilingLayoutUUID_ }, identicalMemoryTypeRequirements{ identicalMemoryTypeRequirements_ }
{}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyProperties( PhysicalDeviceHostImageCopyProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceHostImageCopyProperties( VkPhysicalDeviceHostImageCopyProperties const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceHostImageCopyProperties( *reinterpret_cast<PhysicalDeviceHostImageCopyProperties const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PhysicalDeviceHostImageCopyProperties( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::ImageLayout> const & copySrcLayouts_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::ImageLayout> const & copyDstLayouts_ = {}, std::array<uint8_t,VK_UUID_SIZE> const & optimalTilingLayoutUUID_ = {}, VULKAN_HPP_NAMESPACE::Bool32 identicalMemoryTypeRequirements_ = {}, void * pNext_ = nullptr )
: pNext( pNext_ ), copySrcLayoutCount( static_cast<uint32_t>( copySrcLayouts_.size() ) ), pCopySrcLayouts( copySrcLayouts_.data() ), copyDstLayoutCount( static_cast<uint32_t>( copyDstLayouts_.size() ) ), pCopyDstLayouts( copyDstLayouts_.data() ), optimalTilingLayoutUUID( optimalTilingLayoutUUID_ ), identicalMemoryTypeRequirements( identicalMemoryTypeRequirements_ )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
PhysicalDeviceHostImageCopyProperties & operator=( PhysicalDeviceHostImageCopyProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceHostImageCopyProperties & operator=( VkPhysicalDeviceHostImageCopyProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyProperties const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyProperties & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyProperties & setCopySrcLayoutCount( uint32_t copySrcLayoutCount_ ) VULKAN_HPP_NOEXCEPT
{
copySrcLayoutCount = copySrcLayoutCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyProperties & setPCopySrcLayouts( VULKAN_HPP_NAMESPACE::ImageLayout * pCopySrcLayouts_ ) VULKAN_HPP_NOEXCEPT
{
pCopySrcLayouts = pCopySrcLayouts_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PhysicalDeviceHostImageCopyProperties & setCopySrcLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::ImageLayout> const & copySrcLayouts_ ) VULKAN_HPP_NOEXCEPT
{
copySrcLayoutCount = static_cast<uint32_t>( copySrcLayouts_.size() );
pCopySrcLayouts = copySrcLayouts_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyProperties & setCopyDstLayoutCount( uint32_t copyDstLayoutCount_ ) VULKAN_HPP_NOEXCEPT
{
copyDstLayoutCount = copyDstLayoutCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyProperties & setPCopyDstLayouts( VULKAN_HPP_NAMESPACE::ImageLayout * pCopyDstLayouts_ ) VULKAN_HPP_NOEXCEPT
{
pCopyDstLayouts = pCopyDstLayouts_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PhysicalDeviceHostImageCopyProperties & setCopyDstLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::ImageLayout> const & copyDstLayouts_ ) VULKAN_HPP_NOEXCEPT
{
copyDstLayoutCount = static_cast<uint32_t>( copyDstLayouts_.size() );
pCopyDstLayouts = copyDstLayouts_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyProperties & setOptimalTilingLayoutUUID( std::array<uint8_t,VK_UUID_SIZE> optimalTilingLayoutUUID_ ) VULKAN_HPP_NOEXCEPT
{
optimalTilingLayoutUUID = optimalTilingLayoutUUID_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyProperties & setIdenticalMemoryTypeRequirements( VULKAN_HPP_NAMESPACE::Bool32 identicalMemoryTypeRequirements_ ) VULKAN_HPP_NOEXCEPT
{
identicalMemoryTypeRequirements = identicalMemoryTypeRequirements_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceHostImageCopyProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceHostImageCopyProperties*>( this );
}
operator VkPhysicalDeviceHostImageCopyProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceHostImageCopyProperties*>( this );
}
operator VkPhysicalDeviceHostImageCopyProperties const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceHostImageCopyProperties*>( this );
}
operator VkPhysicalDeviceHostImageCopyProperties *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceHostImageCopyProperties*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ImageLayout * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ImageLayout * const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, copySrcLayoutCount, pCopySrcLayouts, copyDstLayoutCount, pCopyDstLayouts, optimalTilingLayoutUUID, identicalMemoryTypeRequirements );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceHostImageCopyProperties const & ) const = default;
#else
bool operator==( PhysicalDeviceHostImageCopyProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( copySrcLayoutCount == rhs.copySrcLayoutCount )
&& ( pCopySrcLayouts == rhs.pCopySrcLayouts )
&& ( copyDstLayoutCount == rhs.copyDstLayoutCount )
&& ( pCopyDstLayouts == rhs.pCopyDstLayouts )
&& ( optimalTilingLayoutUUID == rhs.optimalTilingLayoutUUID )
&& ( identicalMemoryTypeRequirements == rhs.identicalMemoryTypeRequirements );
#endif
}
bool operator!=( PhysicalDeviceHostImageCopyProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceHostImageCopyProperties;
void * pNext = {};
uint32_t copySrcLayoutCount = {};
VULKAN_HPP_NAMESPACE::ImageLayout * pCopySrcLayouts = {};
uint32_t copyDstLayoutCount = {};
VULKAN_HPP_NAMESPACE::ImageLayout * pCopyDstLayouts = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> optimalTilingLayoutUUID = {};
VULKAN_HPP_NAMESPACE::Bool32 identicalMemoryTypeRequirements = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceHostImageCopyProperties>
{
using Type = PhysicalDeviceHostImageCopyProperties;
};
using PhysicalDeviceHostImageCopyPropertiesEXT = PhysicalDeviceHostImageCopyProperties;
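  // Usage sketch (illustrative): this properties struct uses the usual
  // two-call idiom — query once with null layout pointers to obtain the
  // counts, then point pCopySrcLayouts/pCopyDstLayouts at caller-owned storage
  // and query again. `physicalDevice` is an assumed, valid vk::PhysicalDevice.
  //
  //   vk::PhysicalDeviceHostImageCopyProperties hicProps{};
  //   vk::PhysicalDeviceProperties2 props2{};
  //   props2.setPNext( &hicProps );
  //   physicalDevice.getProperties2( &props2 );  // fills copySrcLayoutCount / copyDstLayoutCount
  //   std::vector<vk::ImageLayout> srcLayouts( hicProps.copySrcLayoutCount );
  //   std::vector<vk::ImageLayout> dstLayouts( hicProps.copyDstLayoutCount );
  //   hicProps.setCopySrcLayouts( srcLayouts ).setCopyDstLayouts( dstLayouts );
  //   physicalDevice.getProperties2( &props2 );  // fills the layout arrays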
// wrapper struct for struct VkPhysicalDeviceHostQueryResetFeatures, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceHostQueryResetFeatures.html
struct PhysicalDeviceHostQueryResetFeatures
{
using NativeType = VkPhysicalDeviceHostQueryResetFeatures;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceHostQueryResetFeatures;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceHostQueryResetFeatures(VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, hostQueryReset{ hostQueryReset_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceHostQueryResetFeatures( PhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceHostQueryResetFeatures( VkPhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceHostQueryResetFeatures( *reinterpret_cast<PhysicalDeviceHostQueryResetFeatures const *>( &rhs ) )
{}
PhysicalDeviceHostQueryResetFeatures & operator=( PhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceHostQueryResetFeatures & operator=( VkPhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeatures const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostQueryResetFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostQueryResetFeatures & setHostQueryReset( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ ) VULKAN_HPP_NOEXCEPT
{
hostQueryReset = hostQueryReset_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceHostQueryResetFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceHostQueryResetFeatures*>( this );
}
operator VkPhysicalDeviceHostQueryResetFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceHostQueryResetFeatures*>( this );
}
operator VkPhysicalDeviceHostQueryResetFeatures const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceHostQueryResetFeatures*>( this );
}
operator VkPhysicalDeviceHostQueryResetFeatures *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceHostQueryResetFeatures*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, hostQueryReset );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceHostQueryResetFeatures const & ) const = default;
#else
bool operator==( PhysicalDeviceHostQueryResetFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( hostQueryReset == rhs.hostQueryReset );
#endif
}
bool operator!=( PhysicalDeviceHostQueryResetFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceHostQueryResetFeatures;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceHostQueryResetFeatures>
{
using Type = PhysicalDeviceHostQueryResetFeatures;
};
using PhysicalDeviceHostQueryResetFeaturesEXT = PhysicalDeviceHostQueryResetFeatures;
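// Usage sketch (illustrative only): feature structs like this one are queried
// by chaining them behind vk::PhysicalDeviceFeatures2, for which the enhanced
// mode offers a StructureChain convenience. `physicalDevice` is an assumed
// vk::PhysicalDevice; Vulkan 1.2 (or VK_EXT_host_query_reset) is assumed.
//
//   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
//                                            vk::PhysicalDeviceHostQueryResetFeatures>();
//   if ( chain.get<vk::PhysicalDeviceHostQueryResetFeatures>().hostQueryReset )
//   {
//     // Once enabled at device creation, query pools can be reset from the
//     // host via vk::Device::resetQueryPool instead of inside a command buffer.
//   }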
// wrapper struct for struct VkPhysicalDeviceIDProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceIDProperties.html
struct PhysicalDeviceIDProperties
{
using NativeType = VkPhysicalDeviceIDProperties;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceIdProperties;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIDProperties(std::array<uint8_t,VK_UUID_SIZE> const & deviceUUID_ = {}, std::array<uint8_t,VK_UUID_SIZE> const & driverUUID_ = {}, std::array<uint8_t,VK_LUID_SIZE> const & deviceLUID_ = {}, uint32_t deviceNodeMask_ = {}, VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, deviceUUID{ deviceUUID_ }, driverUUID{ driverUUID_ }, deviceLUID{ deviceLUID_ }, deviceNodeMask{ deviceNodeMask_ }, deviceLUIDValid{ deviceLUIDValid_ }
{}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIDProperties( PhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceIDProperties( VkPhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceIDProperties( *reinterpret_cast<PhysicalDeviceIDProperties const *>( &rhs ) )
{}
PhysicalDeviceIDProperties & operator=( PhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceIDProperties & operator=( VkPhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceIDProperties const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceIDProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceIDProperties*>( this );
}
operator VkPhysicalDeviceIDProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceIDProperties*>( this );
}
operator VkPhysicalDeviceIDProperties const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceIDProperties*>( this );
}
operator VkPhysicalDeviceIDProperties *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceIDProperties*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_LUID_SIZE> const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, deviceUUID, driverUUID, deviceLUID, deviceNodeMask, deviceLUIDValid );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceIDProperties const & ) const = default;
#else
bool operator==( PhysicalDeviceIDProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( deviceUUID == rhs.deviceUUID )
&& ( driverUUID == rhs.driverUUID )
&& ( deviceLUID == rhs.deviceLUID )
&& ( deviceNodeMask == rhs.deviceNodeMask )
&& ( deviceLUIDValid == rhs.deviceLUIDValid );
#endif
}
bool operator!=( PhysicalDeviceIDProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceIdProperties;
void * pNext = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> deviceUUID = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> driverUUID = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_LUID_SIZE> deviceLUID = {};
uint32_t deviceNodeMask = {};
VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceIdProperties>
{
using Type = PhysicalDeviceIDProperties;
};
using PhysicalDeviceIDPropertiesKHR = PhysicalDeviceIDProperties;
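// Usage sketch (illustrative only): deviceUUID and driverUUID are ArrayWrapper1D
// values (std::array-compatible), so they can be iterated and compared directly;
// deviceLUID and deviceNodeMask are only meaningful when deviceLUIDValid is
// VK_TRUE. `physicalDevice` is an assumed vk::PhysicalDevice on Vulkan 1.1+.
//
//   auto idProps = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
//                                                vk::PhysicalDeviceIDProperties>()
//                      .get<vk::PhysicalDeviceIDProperties>();
//   for ( uint8_t byte : idProps.deviceUUID ) { /* e.g. format as hex */ }
//   if ( idProps.deviceLUIDValid )
//   {
//     // deviceLUID / deviceNodeMask can be matched against DXGI adapters, etc.
//   }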
// wrapper struct for struct VkPhysicalDeviceImage2DViewOf3DFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImage2DViewOf3DFeaturesEXT.html
struct PhysicalDeviceImage2DViewOf3DFeaturesEXT
{
using NativeType = VkPhysicalDeviceImage2DViewOf3DFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImage2DViewOf3DFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceImage2DViewOf3DFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 image2DViewOf3D_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sampler2DViewOf3D_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, image2DViewOf3D{ image2DViewOf3D_ }, sampler2DViewOf3D{ sampler2DViewOf3D_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceImage2DViewOf3DFeaturesEXT( PhysicalDeviceImage2DViewOf3DFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceImage2DViewOf3DFeaturesEXT( VkPhysicalDeviceImage2DViewOf3DFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceImage2DViewOf3DFeaturesEXT( *reinterpret_cast<PhysicalDeviceImage2DViewOf3DFeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceImage2DViewOf3DFeaturesEXT & operator=( PhysicalDeviceImage2DViewOf3DFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceImage2DViewOf3DFeaturesEXT & operator=( VkPhysicalDeviceImage2DViewOf3DFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImage2DViewOf3DFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImage2DViewOf3DFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImage2DViewOf3DFeaturesEXT & setImage2DViewOf3D( VULKAN_HPP_NAMESPACE::Bool32 image2DViewOf3D_ ) VULKAN_HPP_NOEXCEPT
{
image2DViewOf3D = image2DViewOf3D_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImage2DViewOf3DFeaturesEXT & setSampler2DViewOf3D( VULKAN_HPP_NAMESPACE::Bool32 sampler2DViewOf3D_ ) VULKAN_HPP_NOEXCEPT
{
sampler2DViewOf3D = sampler2DViewOf3D_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceImage2DViewOf3DFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceImage2DViewOf3DFeaturesEXT*>( this );
}
operator VkPhysicalDeviceImage2DViewOf3DFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceImage2DViewOf3DFeaturesEXT*>( this );
}
operator VkPhysicalDeviceImage2DViewOf3DFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceImage2DViewOf3DFeaturesEXT*>( this );
}
operator VkPhysicalDeviceImage2DViewOf3DFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceImage2DViewOf3DFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, image2DViewOf3D, sampler2DViewOf3D );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceImage2DViewOf3DFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceImage2DViewOf3DFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( image2DViewOf3D == rhs.image2DViewOf3D )
&& ( sampler2DViewOf3D == rhs.sampler2DViewOf3D );
#endif
}
bool operator!=( PhysicalDeviceImage2DViewOf3DFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImage2DViewOf3DFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 image2DViewOf3D = {};
VULKAN_HPP_NAMESPACE::Bool32 sampler2DViewOf3D = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceImage2DViewOf3DFeaturesEXT>
{
using Type = PhysicalDeviceImage2DViewOf3DFeaturesEXT;
};
// wrapper struct for struct VkPhysicalDeviceImageAlignmentControlFeaturesMESA, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImageAlignmentControlFeaturesMESA.html
struct PhysicalDeviceImageAlignmentControlFeaturesMESA
{
using NativeType = VkPhysicalDeviceImageAlignmentControlFeaturesMESA;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageAlignmentControlFeaturesMESA;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceImageAlignmentControlFeaturesMESA(VULKAN_HPP_NAMESPACE::Bool32 imageAlignmentControl_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, imageAlignmentControl{ imageAlignmentControl_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceImageAlignmentControlFeaturesMESA( PhysicalDeviceImageAlignmentControlFeaturesMESA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceImageAlignmentControlFeaturesMESA( VkPhysicalDeviceImageAlignmentControlFeaturesMESA const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceImageAlignmentControlFeaturesMESA( *reinterpret_cast<PhysicalDeviceImageAlignmentControlFeaturesMESA const *>( &rhs ) )
{}
PhysicalDeviceImageAlignmentControlFeaturesMESA & operator=( PhysicalDeviceImageAlignmentControlFeaturesMESA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceImageAlignmentControlFeaturesMESA & operator=( VkPhysicalDeviceImageAlignmentControlFeaturesMESA const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageAlignmentControlFeaturesMESA const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageAlignmentControlFeaturesMESA & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageAlignmentControlFeaturesMESA & setImageAlignmentControl( VULKAN_HPP_NAMESPACE::Bool32 imageAlignmentControl_ ) VULKAN_HPP_NOEXCEPT
{
imageAlignmentControl = imageAlignmentControl_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceImageAlignmentControlFeaturesMESA const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceImageAlignmentControlFeaturesMESA*>( this );
}
operator VkPhysicalDeviceImageAlignmentControlFeaturesMESA &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceImageAlignmentControlFeaturesMESA*>( this );
}
operator VkPhysicalDeviceImageAlignmentControlFeaturesMESA const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceImageAlignmentControlFeaturesMESA*>( this );
}
operator VkPhysicalDeviceImageAlignmentControlFeaturesMESA *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceImageAlignmentControlFeaturesMESA*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, imageAlignmentControl );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceImageAlignmentControlFeaturesMESA const & ) const = default;
#else
bool operator==( PhysicalDeviceImageAlignmentControlFeaturesMESA const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( imageAlignmentControl == rhs.imageAlignmentControl );
#endif
}
bool operator!=( PhysicalDeviceImageAlignmentControlFeaturesMESA const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageAlignmentControlFeaturesMESA;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 imageAlignmentControl = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceImageAlignmentControlFeaturesMESA>
{
using Type = PhysicalDeviceImageAlignmentControlFeaturesMESA;
};
// wrapper struct for struct VkPhysicalDeviceImageAlignmentControlPropertiesMESA, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImageAlignmentControlPropertiesMESA.html
struct PhysicalDeviceImageAlignmentControlPropertiesMESA
{
using NativeType = VkPhysicalDeviceImageAlignmentControlPropertiesMESA;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageAlignmentControlPropertiesMESA;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceImageAlignmentControlPropertiesMESA(uint32_t supportedImageAlignmentMask_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, supportedImageAlignmentMask{ supportedImageAlignmentMask_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceImageAlignmentControlPropertiesMESA( PhysicalDeviceImageAlignmentControlPropertiesMESA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceImageAlignmentControlPropertiesMESA( VkPhysicalDeviceImageAlignmentControlPropertiesMESA const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceImageAlignmentControlPropertiesMESA( *reinterpret_cast<PhysicalDeviceImageAlignmentControlPropertiesMESA const *>( &rhs ) )
{}
PhysicalDeviceImageAlignmentControlPropertiesMESA & operator=( PhysicalDeviceImageAlignmentControlPropertiesMESA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceImageAlignmentControlPropertiesMESA & operator=( VkPhysicalDeviceImageAlignmentControlPropertiesMESA const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageAlignmentControlPropertiesMESA const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceImageAlignmentControlPropertiesMESA const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceImageAlignmentControlPropertiesMESA*>( this );
}
operator VkPhysicalDeviceImageAlignmentControlPropertiesMESA &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceImageAlignmentControlPropertiesMESA*>( this );
}
operator VkPhysicalDeviceImageAlignmentControlPropertiesMESA const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceImageAlignmentControlPropertiesMESA*>( this );
}
operator VkPhysicalDeviceImageAlignmentControlPropertiesMESA *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceImageAlignmentControlPropertiesMESA*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, supportedImageAlignmentMask );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceImageAlignmentControlPropertiesMESA const & ) const = default;
#else
bool operator==( PhysicalDeviceImageAlignmentControlPropertiesMESA const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( supportedImageAlignmentMask == rhs.supportedImageAlignmentMask );
#endif
}
bool operator!=( PhysicalDeviceImageAlignmentControlPropertiesMESA const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageAlignmentControlPropertiesMESA;
void * pNext = {};
uint32_t supportedImageAlignmentMask = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceImageAlignmentControlPropertiesMESA>
{
using Type = PhysicalDeviceImageAlignmentControlPropertiesMESA;
};
// wrapper struct for struct VkPhysicalDeviceImageCompressionControlFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImageCompressionControlFeaturesEXT.html
struct PhysicalDeviceImageCompressionControlFeaturesEXT
{
using NativeType = VkPhysicalDeviceImageCompressionControlFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageCompressionControlFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceImageCompressionControlFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 imageCompressionControl_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, imageCompressionControl{ imageCompressionControl_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceImageCompressionControlFeaturesEXT( PhysicalDeviceImageCompressionControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceImageCompressionControlFeaturesEXT( VkPhysicalDeviceImageCompressionControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceImageCompressionControlFeaturesEXT( *reinterpret_cast<PhysicalDeviceImageCompressionControlFeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceImageCompressionControlFeaturesEXT & operator=( PhysicalDeviceImageCompressionControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceImageCompressionControlFeaturesEXT & operator=( VkPhysicalDeviceImageCompressionControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageCompressionControlFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageCompressionControlFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageCompressionControlFeaturesEXT & setImageCompressionControl( VULKAN_HPP_NAMESPACE::Bool32 imageCompressionControl_ ) VULKAN_HPP_NOEXCEPT
{
imageCompressionControl = imageCompressionControl_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceImageCompressionControlFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceImageCompressionControlFeaturesEXT*>( this );
}
operator VkPhysicalDeviceImageCompressionControlFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceImageCompressionControlFeaturesEXT*>( this );
}
operator VkPhysicalDeviceImageCompressionControlFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceImageCompressionControlFeaturesEXT*>( this );
}
operator VkPhysicalDeviceImageCompressionControlFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceImageCompressionControlFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, imageCompressionControl );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceImageCompressionControlFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceImageCompressionControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( imageCompressionControl == rhs.imageCompressionControl );
#endif
}
bool operator!=( PhysicalDeviceImageCompressionControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageCompressionControlFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 imageCompressionControl = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceImageCompressionControlFeaturesEXT>
{
using Type = PhysicalDeviceImageCompressionControlFeaturesEXT;
};
// wrapper struct for struct VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT.html
struct PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT
{
using NativeType = VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageCompressionControlSwapchainFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 imageCompressionControlSwapchain_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, imageCompressionControlSwapchain{ imageCompressionControlSwapchain_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT( PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT( VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT( *reinterpret_cast<PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT & operator=( PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT & operator=( VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT & setImageCompressionControlSwapchain( VULKAN_HPP_NAMESPACE::Bool32 imageCompressionControlSwapchain_ ) VULKAN_HPP_NOEXCEPT
{
imageCompressionControlSwapchain = imageCompressionControlSwapchain_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT*>( this );
}
operator VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT*>( this );
}
operator VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT*>( this );
}
operator VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, imageCompressionControlSwapchain );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( imageCompressionControlSwapchain == rhs.imageCompressionControlSwapchain );
#endif
}
bool operator!=( PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageCompressionControlSwapchainFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 imageCompressionControlSwapchain = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceImageCompressionControlSwapchainFeaturesEXT>
{
using Type = PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT;
};
// wrapper struct for struct VkPhysicalDeviceImageDrmFormatModifierInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImageDrmFormatModifierInfoEXT.html
struct PhysicalDeviceImageDrmFormatModifierInfoEXT
{
using NativeType = VkPhysicalDeviceImageDrmFormatModifierInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceImageDrmFormatModifierInfoEXT(uint64_t drmFormatModifier_ = {}, VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, uint32_t queueFamilyIndexCount_ = {}, const uint32_t * pQueueFamilyIndices_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, drmFormatModifier{ drmFormatModifier_ }, sharingMode{ sharingMode_ }, queueFamilyIndexCount{ queueFamilyIndexCount_ }, pQueueFamilyIndices{ pQueueFamilyIndices_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceImageDrmFormatModifierInfoEXT( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceImageDrmFormatModifierInfoEXT( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceImageDrmFormatModifierInfoEXT( *reinterpret_cast<PhysicalDeviceImageDrmFormatModifierInfoEXT const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PhysicalDeviceImageDrmFormatModifierInfoEXT( uint64_t drmFormatModifier_, VULKAN_HPP_NAMESPACE::SharingMode sharingMode_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_, const void * pNext_ = nullptr )
: pNext( pNext_ ), drmFormatModifier( drmFormatModifier_ ), sharingMode( sharingMode_ ), queueFamilyIndexCount( static_cast<uint32_t>( queueFamilyIndices_.size() ) ), pQueueFamilyIndices( queueFamilyIndices_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
PhysicalDeviceImageDrmFormatModifierInfoEXT & operator=( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceImageDrmFormatModifierInfoEXT & operator=( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageDrmFormatModifierInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & setDrmFormatModifier( uint64_t drmFormatModifier_ ) VULKAN_HPP_NOEXCEPT
{
drmFormatModifier = drmFormatModifier_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT
{
sharingMode = sharingMode_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT
{
queueFamilyIndexCount = queueFamilyIndexCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
{
pQueueFamilyIndices = pQueueFamilyIndices_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PhysicalDeviceImageDrmFormatModifierInfoEXT & setQueueFamilyIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
{
queueFamilyIndexCount = static_cast<uint32_t>( queueFamilyIndices_.size() );
pQueueFamilyIndices = queueFamilyIndices_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceImageDrmFormatModifierInfoEXT*>( this );
}
operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceImageDrmFormatModifierInfoEXT*>( this );
}
operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceImageDrmFormatModifierInfoEXT*>( this );
}
operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceImageDrmFormatModifierInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint64_t const &, VULKAN_HPP_NAMESPACE::SharingMode const &, uint32_t const &, const uint32_t * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, drmFormatModifier, sharingMode, queueFamilyIndexCount, pQueueFamilyIndices );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceImageDrmFormatModifierInfoEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( drmFormatModifier == rhs.drmFormatModifier )
&& ( sharingMode == rhs.sharingMode )
&& ( queueFamilyIndexCount == rhs.queueFamilyIndexCount )
&& ( pQueueFamilyIndices == rhs.pQueueFamilyIndices );
#endif
}
bool operator!=( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT;
const void * pNext = {};
uint64_t drmFormatModifier = {};
VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive;
uint32_t queueFamilyIndexCount = {};
const uint32_t * pQueueFamilyIndices = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT>
{
using Type = PhysicalDeviceImageDrmFormatModifierInfoEXT;
};
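// Usage sketch (illustrative only): in enhanced mode the ArrayProxy constructor
// and setQueueFamilyIndices keep queueFamilyIndexCount and pQueueFamilyIndices
// in sync. Note that the struct stores only the pointer, so the index array
// must outlive every use of the struct, and operator== compares that pointer,
// not the pointed-to indices. `modifier`, `graphicsFamily`, and
// `transferFamily` are assumed values.
//
//   std::array<uint32_t, 2> families = { graphicsFamily, transferFamily };
//   vk::PhysicalDeviceImageDrmFormatModifierInfoEXT modifierInfo{};
//   modifierInfo.setDrmFormatModifier( modifier )
//               .setSharingMode( vk::SharingMode::eConcurrent )
//               .setQueueFamilyIndices( families );
//   // chain into a PhysicalDeviceImageFormatInfo2 whose tiling is
//   // vk::ImageTiling::eDrmFormatModifierEXT before querying format support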
// wrapper struct for struct VkPhysicalDeviceImageFormatInfo2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImageFormatInfo2.html
struct PhysicalDeviceImageFormatInfo2
{
using NativeType = VkPhysicalDeviceImageFormatInfo2;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageFormatInfo2;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceImageFormatInfo2(VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::ImageType type_ = VULKAN_HPP_NAMESPACE::ImageType::e1D, VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, format{ format_ }, type{ type_ }, tiling{ tiling_ }, usage{ usage_ }, flags{ flags_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceImageFormatInfo2( PhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceImageFormatInfo2( VkPhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceImageFormatInfo2( *reinterpret_cast<PhysicalDeviceImageFormatInfo2 const *>( &rhs ) )
{}
PhysicalDeviceImageFormatInfo2 & operator=( PhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceImageFormatInfo2 & operator=( VkPhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
{
format = format_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setType( VULKAN_HPP_NAMESPACE::ImageType type_ ) VULKAN_HPP_NOEXCEPT
{
type = type_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT
{
tiling = tiling_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
{
usage = usage_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceImageFormatInfo2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2*>( this );
}
operator VkPhysicalDeviceImageFormatInfo2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceImageFormatInfo2*>( this );
}
operator VkPhysicalDeviceImageFormatInfo2 const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2*>( this );
}
operator VkPhysicalDeviceImageFormatInfo2 *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceImageFormatInfo2*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::ImageType const &, VULKAN_HPP_NAMESPACE::ImageTiling const &, VULKAN_HPP_NAMESPACE::ImageUsageFlags const &, VULKAN_HPP_NAMESPACE::ImageCreateFlags const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, format, type, tiling, usage, flags );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceImageFormatInfo2 const & ) const = default;
#else
bool operator==( PhysicalDeviceImageFormatInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( format == rhs.format )
&& ( type == rhs.type )
&& ( tiling == rhs.tiling )
&& ( usage == rhs.usage )
&& ( flags == rhs.flags );
#endif
}
bool operator!=( PhysicalDeviceImageFormatInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageFormatInfo2;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
VULKAN_HPP_NAMESPACE::ImageType type = VULKAN_HPP_NAMESPACE::ImageType::e1D;
VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal;
VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {};
VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceImageFormatInfo2>
{
using Type = PhysicalDeviceImageFormatInfo2;
};
using PhysicalDeviceImageFormatInfo2KHR = PhysicalDeviceImageFormatInfo2;
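// Usage sketch (illustrative only): this struct is the input side of
// vk::PhysicalDevice::getImageFormatProperties2. `physicalDevice` is an assumed
// vk::PhysicalDevice; with exceptions enabled, an unsupported combination is
// expected to surface as a vk::SystemError carrying eErrorFormatNotSupported.
//
//   vk::PhysicalDeviceImageFormatInfo2 formatInfo{ vk::Format::eR8G8B8A8Unorm,
//                                                  vk::ImageType::e2D,
//                                                  vk::ImageTiling::eOptimal,
//                                                  vk::ImageUsageFlagBits::eSampled |
//                                                    vk::ImageUsageFlagBits::eTransferDst };
//   vk::ImageFormatProperties2 formatProps = physicalDevice.getImageFormatProperties2( formatInfo );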
// wrapper struct for struct VkPhysicalDeviceImageProcessing2FeaturesQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImageProcessing2FeaturesQCOM.html
struct PhysicalDeviceImageProcessing2FeaturesQCOM
{
using NativeType = VkPhysicalDeviceImageProcessing2FeaturesQCOM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageProcessing2FeaturesQCOM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceImageProcessing2FeaturesQCOM(VULKAN_HPP_NAMESPACE::Bool32 textureBlockMatch2_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, textureBlockMatch2{ textureBlockMatch2_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceImageProcessing2FeaturesQCOM( PhysicalDeviceImageProcessing2FeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceImageProcessing2FeaturesQCOM( VkPhysicalDeviceImageProcessing2FeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceImageProcessing2FeaturesQCOM( *reinterpret_cast<PhysicalDeviceImageProcessing2FeaturesQCOM const *>( &rhs ) )
{}
PhysicalDeviceImageProcessing2FeaturesQCOM & operator=( PhysicalDeviceImageProcessing2FeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceImageProcessing2FeaturesQCOM & operator=( VkPhysicalDeviceImageProcessing2FeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessing2FeaturesQCOM const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageProcessing2FeaturesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageProcessing2FeaturesQCOM & setTextureBlockMatch2( VULKAN_HPP_NAMESPACE::Bool32 textureBlockMatch2_ ) VULKAN_HPP_NOEXCEPT
{
textureBlockMatch2 = textureBlockMatch2_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceImageProcessing2FeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceImageProcessing2FeaturesQCOM*>( this );
}
operator VkPhysicalDeviceImageProcessing2FeaturesQCOM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceImageProcessing2FeaturesQCOM*>( this );
}
operator VkPhysicalDeviceImageProcessing2FeaturesQCOM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceImageProcessing2FeaturesQCOM*>( this );
}
operator VkPhysicalDeviceImageProcessing2FeaturesQCOM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceImageProcessing2FeaturesQCOM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, textureBlockMatch2 );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceImageProcessing2FeaturesQCOM const & ) const = default;
#else
bool operator==( PhysicalDeviceImageProcessing2FeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( textureBlockMatch2 == rhs.textureBlockMatch2 );
#endif
}
bool operator!=( PhysicalDeviceImageProcessing2FeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageProcessing2FeaturesQCOM;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 textureBlockMatch2 = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceImageProcessing2FeaturesQCOM>
{
using Type = PhysicalDeviceImageProcessing2FeaturesQCOM;
};
// wrapper struct for struct VkPhysicalDeviceImageProcessing2PropertiesQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImageProcessing2PropertiesQCOM.html
struct PhysicalDeviceImageProcessing2PropertiesQCOM
{
using NativeType = VkPhysicalDeviceImageProcessing2PropertiesQCOM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageProcessing2PropertiesQCOM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceImageProcessing2PropertiesQCOM(VULKAN_HPP_NAMESPACE::Extent2D maxBlockMatchWindow_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, maxBlockMatchWindow{ maxBlockMatchWindow_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceImageProcessing2PropertiesQCOM( PhysicalDeviceImageProcessing2PropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceImageProcessing2PropertiesQCOM( VkPhysicalDeviceImageProcessing2PropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceImageProcessing2PropertiesQCOM( *reinterpret_cast<PhysicalDeviceImageProcessing2PropertiesQCOM const *>( &rhs ) )
{}
PhysicalDeviceImageProcessing2PropertiesQCOM & operator=( PhysicalDeviceImageProcessing2PropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceImageProcessing2PropertiesQCOM & operator=( VkPhysicalDeviceImageProcessing2PropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessing2PropertiesQCOM const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceImageProcessing2PropertiesQCOM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceImageProcessing2PropertiesQCOM*>( this );
}
operator VkPhysicalDeviceImageProcessing2PropertiesQCOM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceImageProcessing2PropertiesQCOM*>( this );
}
operator VkPhysicalDeviceImageProcessing2PropertiesQCOM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceImageProcessing2PropertiesQCOM*>( this );
}
operator VkPhysicalDeviceImageProcessing2PropertiesQCOM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceImageProcessing2PropertiesQCOM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Extent2D const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, maxBlockMatchWindow );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceImageProcessing2PropertiesQCOM const & ) const = default;
#else
bool operator==( PhysicalDeviceImageProcessing2PropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( maxBlockMatchWindow == rhs.maxBlockMatchWindow );
#endif
}
bool operator!=( PhysicalDeviceImageProcessing2PropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageProcessing2PropertiesQCOM;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Extent2D maxBlockMatchWindow = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceImageProcessing2PropertiesQCOM>
{
using Type = PhysicalDeviceImageProcessing2PropertiesQCOM;
};
// wrapper struct for struct VkPhysicalDeviceImageProcessingFeaturesQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImageProcessingFeaturesQCOM.html
struct PhysicalDeviceImageProcessingFeaturesQCOM
{
using NativeType = VkPhysicalDeviceImageProcessingFeaturesQCOM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageProcessingFeaturesQCOM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceImageProcessingFeaturesQCOM(VULKAN_HPP_NAMESPACE::Bool32 textureSampleWeighted_ = {}, VULKAN_HPP_NAMESPACE::Bool32 textureBoxFilter_ = {}, VULKAN_HPP_NAMESPACE::Bool32 textureBlockMatch_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, textureSampleWeighted{ textureSampleWeighted_ }, textureBoxFilter{ textureBoxFilter_ }, textureBlockMatch{ textureBlockMatch_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceImageProcessingFeaturesQCOM( PhysicalDeviceImageProcessingFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceImageProcessingFeaturesQCOM( VkPhysicalDeviceImageProcessingFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceImageProcessingFeaturesQCOM( *reinterpret_cast<PhysicalDeviceImageProcessingFeaturesQCOM const *>( &rhs ) )
{}
PhysicalDeviceImageProcessingFeaturesQCOM & operator=( PhysicalDeviceImageProcessingFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceImageProcessingFeaturesQCOM & operator=( VkPhysicalDeviceImageProcessingFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessingFeaturesQCOM const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageProcessingFeaturesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageProcessingFeaturesQCOM & setTextureSampleWeighted( VULKAN_HPP_NAMESPACE::Bool32 textureSampleWeighted_ ) VULKAN_HPP_NOEXCEPT
{
textureSampleWeighted = textureSampleWeighted_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageProcessingFeaturesQCOM & setTextureBoxFilter( VULKAN_HPP_NAMESPACE::Bool32 textureBoxFilter_ ) VULKAN_HPP_NOEXCEPT
{
textureBoxFilter = textureBoxFilter_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageProcessingFeaturesQCOM & setTextureBlockMatch( VULKAN_HPP_NAMESPACE::Bool32 textureBlockMatch_ ) VULKAN_HPP_NOEXCEPT
{
textureBlockMatch = textureBlockMatch_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceImageProcessingFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceImageProcessingFeaturesQCOM*>( this );
}
operator VkPhysicalDeviceImageProcessingFeaturesQCOM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceImageProcessingFeaturesQCOM*>( this );
}
operator VkPhysicalDeviceImageProcessingFeaturesQCOM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceImageProcessingFeaturesQCOM*>( this );
}
operator VkPhysicalDeviceImageProcessingFeaturesQCOM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceImageProcessingFeaturesQCOM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, textureSampleWeighted, textureBoxFilter, textureBlockMatch );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceImageProcessingFeaturesQCOM const & ) const = default;
#else
bool operator==( PhysicalDeviceImageProcessingFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( textureSampleWeighted == rhs.textureSampleWeighted )
&& ( textureBoxFilter == rhs.textureBoxFilter )
&& ( textureBlockMatch == rhs.textureBlockMatch );
#endif
}
bool operator!=( PhysicalDeviceImageProcessingFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageProcessingFeaturesQCOM;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 textureSampleWeighted = {};
VULKAN_HPP_NAMESPACE::Bool32 textureBoxFilter = {};
VULKAN_HPP_NAMESPACE::Bool32 textureBlockMatch = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceImageProcessingFeaturesQCOM>
{
using Type = PhysicalDeviceImageProcessingFeaturesQCOM;
};
// wrapper struct for struct VkPhysicalDeviceImageProcessingPropertiesQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImageProcessingPropertiesQCOM.html
struct PhysicalDeviceImageProcessingPropertiesQCOM
{
using NativeType = VkPhysicalDeviceImageProcessingPropertiesQCOM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageProcessingPropertiesQCOM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceImageProcessingPropertiesQCOM(uint32_t maxWeightFilterPhases_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxWeightFilterDimension_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxBlockMatchRegion_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxBoxFilterBlockSize_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, maxWeightFilterPhases{ maxWeightFilterPhases_ }, maxWeightFilterDimension{ maxWeightFilterDimension_ }, maxBlockMatchRegion{ maxBlockMatchRegion_ }, maxBoxFilterBlockSize{ maxBoxFilterBlockSize_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceImageProcessingPropertiesQCOM( PhysicalDeviceImageProcessingPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceImageProcessingPropertiesQCOM( VkPhysicalDeviceImageProcessingPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceImageProcessingPropertiesQCOM( *reinterpret_cast<PhysicalDeviceImageProcessingPropertiesQCOM const *>( &rhs ) )
{}
PhysicalDeviceImageProcessingPropertiesQCOM & operator=( PhysicalDeviceImageProcessingPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceImageProcessingPropertiesQCOM & operator=( VkPhysicalDeviceImageProcessingPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessingPropertiesQCOM const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceImageProcessingPropertiesQCOM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceImageProcessingPropertiesQCOM*>( this );
}
operator VkPhysicalDeviceImageProcessingPropertiesQCOM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceImageProcessingPropertiesQCOM*>( this );
}
operator VkPhysicalDeviceImageProcessingPropertiesQCOM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceImageProcessingPropertiesQCOM*>( this );
}
operator VkPhysicalDeviceImageProcessingPropertiesQCOM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceImageProcessingPropertiesQCOM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, maxWeightFilterPhases, maxWeightFilterDimension, maxBlockMatchRegion, maxBoxFilterBlockSize );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceImageProcessingPropertiesQCOM const & ) const = default;
#else
bool operator==( PhysicalDeviceImageProcessingPropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( maxWeightFilterPhases == rhs.maxWeightFilterPhases )
&& ( maxWeightFilterDimension == rhs.maxWeightFilterDimension )
&& ( maxBlockMatchRegion == rhs.maxBlockMatchRegion )
&& ( maxBoxFilterBlockSize == rhs.maxBoxFilterBlockSize );
#endif
}
bool operator!=( PhysicalDeviceImageProcessingPropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageProcessingPropertiesQCOM;
void * pNext = {};
uint32_t maxWeightFilterPhases = {};
VULKAN_HPP_NAMESPACE::Extent2D maxWeightFilterDimension = {};
VULKAN_HPP_NAMESPACE::Extent2D maxBlockMatchRegion = {};
VULKAN_HPP_NAMESPACE::Extent2D maxBoxFilterBlockSize = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceImageProcessingPropertiesQCOM>
{
using Type = PhysicalDeviceImageProcessingPropertiesQCOM;
};
// wrapper struct for struct VkPhysicalDeviceImageRobustnessFeatures, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImageRobustnessFeatures.html
struct PhysicalDeviceImageRobustnessFeatures
{
using NativeType = VkPhysicalDeviceImageRobustnessFeatures;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageRobustnessFeatures;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceImageRobustnessFeatures(VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, robustImageAccess{ robustImageAccess_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceImageRobustnessFeatures( PhysicalDeviceImageRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceImageRobustnessFeatures( VkPhysicalDeviceImageRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceImageRobustnessFeatures( *reinterpret_cast<PhysicalDeviceImageRobustnessFeatures const *>( &rhs ) )
{}
PhysicalDeviceImageRobustnessFeatures & operator=( PhysicalDeviceImageRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceImageRobustnessFeatures & operator=( VkPhysicalDeviceImageRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageRobustnessFeatures const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageRobustnessFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageRobustnessFeatures & setRobustImageAccess( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess_ ) VULKAN_HPP_NOEXCEPT
{
robustImageAccess = robustImageAccess_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceImageRobustnessFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceImageRobustnessFeatures*>( this );
}
operator VkPhysicalDeviceImageRobustnessFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceImageRobustnessFeatures*>( this );
}
operator VkPhysicalDeviceImageRobustnessFeatures const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceImageRobustnessFeatures*>( this );
}
operator VkPhysicalDeviceImageRobustnessFeatures *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceImageRobustnessFeatures*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, robustImageAccess );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceImageRobustnessFeatures const & ) const = default;
#else
bool operator==( PhysicalDeviceImageRobustnessFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( robustImageAccess == rhs.robustImageAccess );
#endif
}
bool operator!=( PhysicalDeviceImageRobustnessFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageRobustnessFeatures;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceImageRobustnessFeatures>
{
using Type = PhysicalDeviceImageRobustnessFeatures;
};
using PhysicalDeviceImageRobustnessFeaturesEXT = PhysicalDeviceImageRobustnessFeatures;
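  // Usage sketch: querying and enabling robustImageAccess. Assumes a valid
  // vk::PhysicalDevice `physicalDevice` (hypothetical name); illustrative only.
  //   vk::PhysicalDeviceImageRobustnessFeatures robustness{};
  //   vk::PhysicalDeviceFeatures2               features2{};
  //   features2.pNext = &robustness;
  //   physicalDevice.getFeatures2( &features2 );
  //   // If robustness.robustImageAccess is VK_TRUE, chain the same struct into
  //   // vk::DeviceCreateInfo::pNext at device creation to enable the feature.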
// wrapper struct for struct VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT.html
struct PhysicalDeviceImageSlicedViewOf3DFeaturesEXT
{
using NativeType = VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageSlicedViewOf3DFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceImageSlicedViewOf3DFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 imageSlicedViewOf3D_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, imageSlicedViewOf3D{ imageSlicedViewOf3D_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceImageSlicedViewOf3DFeaturesEXT( PhysicalDeviceImageSlicedViewOf3DFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceImageSlicedViewOf3DFeaturesEXT( VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceImageSlicedViewOf3DFeaturesEXT( *reinterpret_cast<PhysicalDeviceImageSlicedViewOf3DFeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceImageSlicedViewOf3DFeaturesEXT & operator=( PhysicalDeviceImageSlicedViewOf3DFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceImageSlicedViewOf3DFeaturesEXT & operator=( VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageSlicedViewOf3DFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageSlicedViewOf3DFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageSlicedViewOf3DFeaturesEXT & setImageSlicedViewOf3D( VULKAN_HPP_NAMESPACE::Bool32 imageSlicedViewOf3D_ ) VULKAN_HPP_NOEXCEPT
{
imageSlicedViewOf3D = imageSlicedViewOf3D_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT*>( this );
}
operator VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT*>( this );
}
operator VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT*>( this );
}
operator VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, imageSlicedViewOf3D );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceImageSlicedViewOf3DFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceImageSlicedViewOf3DFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( imageSlicedViewOf3D == rhs.imageSlicedViewOf3D );
#endif
}
bool operator!=( PhysicalDeviceImageSlicedViewOf3DFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageSlicedViewOf3DFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 imageSlicedViewOf3D = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceImageSlicedViewOf3DFeaturesEXT>
{
using Type = PhysicalDeviceImageSlicedViewOf3DFeaturesEXT;
};
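  // Usage sketch: once imageSlicedViewOf3D is enabled, a 3D image view can be
  // restricted to a range of slices by chaining vk::ImageViewSlicedCreateInfoEXT
  // into vk::ImageViewCreateInfo. Names below are hypothetical; illustrative only.
  //   vk::ImageViewSlicedCreateInfoEXT slicedInfo{};
  //   slicedInfo.sliceOffset = 0;
  //   slicedInfo.sliceCount  = VK_REMAINING_3D_SLICES_EXT;
  //   vk::ImageViewCreateInfo viewInfo{};  // viewType must be vk::ImageViewType::e3D
  //   viewInfo.pNext = &slicedInfo;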
// wrapper struct for struct VkPhysicalDeviceImageViewImageFormatInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImageViewImageFormatInfoEXT.html
struct PhysicalDeviceImageViewImageFormatInfoEXT
{
using NativeType = VkPhysicalDeviceImageViewImageFormatInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewImageFormatInfoEXT(VULKAN_HPP_NAMESPACE::ImageViewType imageViewType_ = VULKAN_HPP_NAMESPACE::ImageViewType::e1D, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, imageViewType{ imageViewType_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewImageFormatInfoEXT( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceImageViewImageFormatInfoEXT( VkPhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceImageViewImageFormatInfoEXT( *reinterpret_cast<PhysicalDeviceImageViewImageFormatInfoEXT const *>( &rhs ) )
{}
PhysicalDeviceImageViewImageFormatInfoEXT & operator=( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceImageViewImageFormatInfoEXT & operator=( VkPhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageViewImageFormatInfoEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageViewImageFormatInfoEXT & setImageViewType( VULKAN_HPP_NAMESPACE::ImageViewType imageViewType_ ) VULKAN_HPP_NOEXCEPT
{
imageViewType = imageViewType_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceImageViewImageFormatInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceImageViewImageFormatInfoEXT*>( this );
}
operator VkPhysicalDeviceImageViewImageFormatInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceImageViewImageFormatInfoEXT*>( this );
}
operator VkPhysicalDeviceImageViewImageFormatInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceImageViewImageFormatInfoEXT*>( this );
}
operator VkPhysicalDeviceImageViewImageFormatInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceImageViewImageFormatInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ImageViewType const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, imageViewType );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceImageViewImageFormatInfoEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( imageViewType == rhs.imageViewType );
#endif
}
bool operator!=( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::ImageViewType imageViewType = VULKAN_HPP_NAMESPACE::ImageViewType::e1D;
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT>
{
using Type = PhysicalDeviceImageViewImageFormatInfoEXT;
};
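  // Usage sketch: this struct is an input to capability queries; chain it into
  // vk::PhysicalDeviceImageFormatInfo2 so the implementation can report view-type
  // dependent results (e.g. cubic filtering via VK_EXT_filter_cubic). Assumes a
  // valid vk::PhysicalDevice `physicalDevice`; illustrative only.
  //   vk::PhysicalDeviceImageViewImageFormatInfoEXT viewTypeInfo{ vk::ImageViewType::e2D };
  //   vk::PhysicalDeviceImageFormatInfo2 formatInfo{};
  //   formatInfo.format = vk::Format::eR8G8B8A8Unorm;
  //   formatInfo.type   = vk::ImageType::e2D;
  //   formatInfo.tiling = vk::ImageTiling::eOptimal;
  //   formatInfo.usage  = vk::ImageUsageFlagBits::eSampled;
  //   formatInfo.pNext  = &viewTypeInfo;
  //   vk::FilterCubicImageViewImageFormatPropertiesEXT cubicProps{};
  //   vk::ImageFormatProperties2 formatProps{};
  //   formatProps.pNext = &cubicProps;
  //   vk::Result result = physicalDevice.getImageFormatProperties2( &formatInfo, &formatProps );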
// wrapper struct for struct VkPhysicalDeviceImageViewMinLodFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImageViewMinLodFeaturesEXT.html
struct PhysicalDeviceImageViewMinLodFeaturesEXT
{
using NativeType = VkPhysicalDeviceImageViewMinLodFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageViewMinLodFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewMinLodFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 minLod_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, minLod{ minLod_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewMinLodFeaturesEXT( PhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceImageViewMinLodFeaturesEXT( VkPhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceImageViewMinLodFeaturesEXT( *reinterpret_cast<PhysicalDeviceImageViewMinLodFeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceImageViewMinLodFeaturesEXT & operator=( PhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceImageViewMinLodFeaturesEXT & operator=( VkPhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewMinLodFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageViewMinLodFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageViewMinLodFeaturesEXT & setMinLod( VULKAN_HPP_NAMESPACE::Bool32 minLod_ ) VULKAN_HPP_NOEXCEPT
{
minLod = minLod_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceImageViewMinLodFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceImageViewMinLodFeaturesEXT*>( this );
}
operator VkPhysicalDeviceImageViewMinLodFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceImageViewMinLodFeaturesEXT*>( this );
}
operator VkPhysicalDeviceImageViewMinLodFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceImageViewMinLodFeaturesEXT*>( this );
}
operator VkPhysicalDeviceImageViewMinLodFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceImageViewMinLodFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, minLod );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceImageViewMinLodFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( minLod == rhs.minLod );
#endif
}
bool operator!=( PhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageViewMinLodFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 minLod = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceImageViewMinLodFeaturesEXT>
{
using Type = PhysicalDeviceImageViewMinLodFeaturesEXT;
};
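  // Usage sketch: with minLod enabled, an image view can clamp the level of detail
  // used when sampling by chaining vk::ImageViewMinLodCreateInfoEXT into
  // vk::ImageViewCreateInfo. Hypothetical names; illustrative only.
  //   vk::ImageViewMinLodCreateInfoEXT minLodInfo{};
  //   minLodInfo.minLod = 2.0f;  // all sampling through this view uses LOD >= 2
  //   vk::ImageViewCreateInfo viewInfo{};
  //   viewInfo.pNext = &minLodInfo;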
// wrapper struct for struct VkPhysicalDeviceImagelessFramebufferFeatures, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImagelessFramebufferFeatures.html
struct PhysicalDeviceImagelessFramebufferFeatures
{
using NativeType = VkPhysicalDeviceImagelessFramebufferFeatures;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImagelessFramebufferFeatures;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceImagelessFramebufferFeatures(VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, imagelessFramebuffer{ imagelessFramebuffer_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceImagelessFramebufferFeatures( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceImagelessFramebufferFeatures( VkPhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceImagelessFramebufferFeatures( *reinterpret_cast<PhysicalDeviceImagelessFramebufferFeatures const *>( &rhs ) )
{}
PhysicalDeviceImagelessFramebufferFeatures & operator=( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceImagelessFramebufferFeatures & operator=( VkPhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeatures const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImagelessFramebufferFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImagelessFramebufferFeatures & setImagelessFramebuffer( VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ ) VULKAN_HPP_NOEXCEPT
{
imagelessFramebuffer = imagelessFramebuffer_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceImagelessFramebufferFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceImagelessFramebufferFeatures*>( this );
}
operator VkPhysicalDeviceImagelessFramebufferFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceImagelessFramebufferFeatures*>( this );
}
operator VkPhysicalDeviceImagelessFramebufferFeatures const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceImagelessFramebufferFeatures*>( this );
}
operator VkPhysicalDeviceImagelessFramebufferFeatures *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceImagelessFramebufferFeatures*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, imagelessFramebuffer );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceImagelessFramebufferFeatures const & ) const = default;
#else
bool operator==( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( imagelessFramebuffer == rhs.imagelessFramebuffer );
#endif
}
bool operator!=( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImagelessFramebufferFeatures;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceImagelessFramebufferFeatures>
{
using Type = PhysicalDeviceImagelessFramebufferFeatures;
};
using PhysicalDeviceImagelessFramebufferFeaturesKHR = PhysicalDeviceImagelessFramebufferFeatures;
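  // Usage sketch: with imagelessFramebuffer enabled, a framebuffer can be created
  // without concrete image views by setting vk::FramebufferCreateFlagBits::eImageless
  // and chaining vk::FramebufferAttachmentsCreateInfo, which describes the formats,
  // usage and extents of the attachments; the actual views are supplied later at
  // render-pass begin time via vk::RenderPassAttachmentBeginInfo. Illustrative only.
  //   vk::FramebufferAttachmentImageInfo attachmentInfo{ /* usage, extents, formats */ };
  //   vk::FramebufferAttachmentsCreateInfo attachmentsInfo{};
  //   attachmentsInfo.attachmentImageInfoCount = 1;
  //   attachmentsInfo.pAttachmentImageInfos    = &attachmentInfo;
  //   vk::FramebufferCreateInfo fbInfo{};
  //   fbInfo.flags = vk::FramebufferCreateFlagBits::eImageless;
  //   fbInfo.pNext = &attachmentsInfo;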
// wrapper struct for struct VkPhysicalDeviceIndexTypeUint8Features, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceIndexTypeUint8Features.html
struct PhysicalDeviceIndexTypeUint8Features
{
using NativeType = VkPhysicalDeviceIndexTypeUint8Features;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceIndexTypeUint8Features;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceIndexTypeUint8Features(VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, indexTypeUint8{ indexTypeUint8_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceIndexTypeUint8Features( PhysicalDeviceIndexTypeUint8Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceIndexTypeUint8Features( VkPhysicalDeviceIndexTypeUint8Features const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceIndexTypeUint8Features( *reinterpret_cast<PhysicalDeviceIndexTypeUint8Features const *>( &rhs ) )
{}
PhysicalDeviceIndexTypeUint8Features & operator=( PhysicalDeviceIndexTypeUint8Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceIndexTypeUint8Features & operator=( VkPhysicalDeviceIndexTypeUint8Features const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8Features const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIndexTypeUint8Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIndexTypeUint8Features & setIndexTypeUint8( VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_ ) VULKAN_HPP_NOEXCEPT
{
indexTypeUint8 = indexTypeUint8_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceIndexTypeUint8Features const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceIndexTypeUint8Features*>( this );
}
operator VkPhysicalDeviceIndexTypeUint8Features &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceIndexTypeUint8Features*>( this );
}
operator VkPhysicalDeviceIndexTypeUint8Features const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceIndexTypeUint8Features*>( this );
}
operator VkPhysicalDeviceIndexTypeUint8Features *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceIndexTypeUint8Features*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, indexTypeUint8 );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceIndexTypeUint8Features const & ) const = default;
#else
bool operator==( PhysicalDeviceIndexTypeUint8Features const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( indexTypeUint8 == rhs.indexTypeUint8 );
#endif
}
bool operator!=( PhysicalDeviceIndexTypeUint8Features const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceIndexTypeUint8Features;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8 = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceIndexTypeUint8Features>
{
using Type = PhysicalDeviceIndexTypeUint8Features;
};
using PhysicalDeviceIndexTypeUint8FeaturesEXT = PhysicalDeviceIndexTypeUint8Features;
using PhysicalDeviceIndexTypeUint8FeaturesKHR = PhysicalDeviceIndexTypeUint8Features;
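  // Usage sketch: once indexTypeUint8 is enabled, 8-bit index buffers can be bound
  // directly. `commandBuffer` and `indexBuffer` are hypothetical; illustrative only.
  //   commandBuffer.bindIndexBuffer( indexBuffer, 0, vk::IndexType::eUint8 );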
// wrapper struct for struct VkPhysicalDeviceInheritedViewportScissorFeaturesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceInheritedViewportScissorFeaturesNV.html
struct PhysicalDeviceInheritedViewportScissorFeaturesNV
{
using NativeType = VkPhysicalDeviceInheritedViewportScissorFeaturesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceInheritedViewportScissorFeaturesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceInheritedViewportScissorFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 inheritedViewportScissor2D_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, inheritedViewportScissor2D{ inheritedViewportScissor2D_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceInheritedViewportScissorFeaturesNV( PhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceInheritedViewportScissorFeaturesNV( VkPhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceInheritedViewportScissorFeaturesNV( *reinterpret_cast<PhysicalDeviceInheritedViewportScissorFeaturesNV const *>( &rhs ) )
{}
PhysicalDeviceInheritedViewportScissorFeaturesNV & operator=( PhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceInheritedViewportScissorFeaturesNV & operator=( VkPhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceInheritedViewportScissorFeaturesNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInheritedViewportScissorFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInheritedViewportScissorFeaturesNV & setInheritedViewportScissor2D( VULKAN_HPP_NAMESPACE::Bool32 inheritedViewportScissor2D_ ) VULKAN_HPP_NOEXCEPT
{
inheritedViewportScissor2D = inheritedViewportScissor2D_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceInheritedViewportScissorFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceInheritedViewportScissorFeaturesNV*>( this );
}
operator VkPhysicalDeviceInheritedViewportScissorFeaturesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceInheritedViewportScissorFeaturesNV*>( this );
}
operator VkPhysicalDeviceInheritedViewportScissorFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceInheritedViewportScissorFeaturesNV*>( this );
}
operator VkPhysicalDeviceInheritedViewportScissorFeaturesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceInheritedViewportScissorFeaturesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, inheritedViewportScissor2D );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceInheritedViewportScissorFeaturesNV const & ) const = default;
#else
bool operator==( PhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( inheritedViewportScissor2D == rhs.inheritedViewportScissor2D );
#endif
}
bool operator!=( PhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInheritedViewportScissorFeaturesNV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 inheritedViewportScissor2D = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceInheritedViewportScissorFeaturesNV>
{
using Type = PhysicalDeviceInheritedViewportScissorFeaturesNV;
};
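  // Usage sketch: with inheritedViewportScissor2D enabled, a secondary command buffer
  // can inherit the primary's 2D viewport/scissor state by chaining
  // vk::CommandBufferInheritanceViewportScissorInfoNV into
  // vk::CommandBufferInheritanceInfo. Hypothetical names; illustrative only.
  //   vk::Viewport viewportDepth{};  // only minDepth / maxDepth are consumed here
  //   vk::CommandBufferInheritanceViewportScissorInfoNV viewportScissorInfo{};
  //   viewportScissorInfo.viewportScissor2D  = VK_TRUE;
  //   viewportScissorInfo.viewportDepthCount = 1;
  //   viewportScissorInfo.pViewportDepths    = &viewportDepth;
  //   vk::CommandBufferInheritanceInfo inheritance{};
  //   inheritance.pNext = &viewportScissorInfo;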
// wrapper struct for struct VkPhysicalDeviceInlineUniformBlockFeatures, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceInlineUniformBlockFeatures.html
struct PhysicalDeviceInlineUniformBlockFeatures
{
using NativeType = VkPhysicalDeviceInlineUniformBlockFeatures;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceInlineUniformBlockFeatures;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockFeatures(VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, inlineUniformBlock{ inlineUniformBlock_ }, descriptorBindingInlineUniformBlockUpdateAfterBind{ descriptorBindingInlineUniformBlockUpdateAfterBind_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockFeatures( PhysicalDeviceInlineUniformBlockFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceInlineUniformBlockFeatures( VkPhysicalDeviceInlineUniformBlockFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceInlineUniformBlockFeatures( *reinterpret_cast<PhysicalDeviceInlineUniformBlockFeatures const *>( &rhs ) )
{}
PhysicalDeviceInlineUniformBlockFeatures & operator=( PhysicalDeviceInlineUniformBlockFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceInlineUniformBlockFeatures & operator=( VkPhysicalDeviceInlineUniformBlockFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeatures const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInlineUniformBlockFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInlineUniformBlockFeatures & setInlineUniformBlock( VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ ) VULKAN_HPP_NOEXCEPT
{
inlineUniformBlock = inlineUniformBlock_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInlineUniformBlockFeatures & setDescriptorBindingInlineUniformBlockUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBindingInlineUniformBlockUpdateAfterBind = descriptorBindingInlineUniformBlockUpdateAfterBind_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceInlineUniformBlockFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceInlineUniformBlockFeatures*>( this );
}
operator VkPhysicalDeviceInlineUniformBlockFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceInlineUniformBlockFeatures*>( this );
}
operator VkPhysicalDeviceInlineUniformBlockFeatures const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceInlineUniformBlockFeatures*>( this );
}
operator VkPhysicalDeviceInlineUniformBlockFeatures *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceInlineUniformBlockFeatures*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, inlineUniformBlock, descriptorBindingInlineUniformBlockUpdateAfterBind );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceInlineUniformBlockFeatures const & ) const = default;
#else
bool operator==( PhysicalDeviceInlineUniformBlockFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( inlineUniformBlock == rhs.inlineUniformBlock )
&& ( descriptorBindingInlineUniformBlockUpdateAfterBind == rhs.descriptorBindingInlineUniformBlockUpdateAfterBind );
#endif
}
bool operator!=( PhysicalDeviceInlineUniformBlockFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInlineUniformBlockFeatures;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock = {};
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceInlineUniformBlockFeatures>
{
using Type = PhysicalDeviceInlineUniformBlockFeatures;
};
using PhysicalDeviceInlineUniformBlockFeaturesEXT = PhysicalDeviceInlineUniformBlockFeatures;
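  // Usage sketch: with inlineUniformBlock enabled, small constant data can be written
  // straight into a descriptor set; note that descriptorCount is a size in bytes for
  // this descriptor type. Assumes a valid vk::Device `device` and a vk::DescriptorSet
  // `descriptorSet` with an eInlineUniformBlock binding; hypothetical names.
  //   const uint32_t inlineData[4] = { 0, 1, 2, 3 };
  //   vk::WriteDescriptorSetInlineUniformBlock inlineWrite{};
  //   inlineWrite.dataSize = sizeof( inlineData );
  //   inlineWrite.pData    = inlineData;
  //   vk::WriteDescriptorSet write{};
  //   write.dstSet          = descriptorSet;
  //   write.descriptorType  = vk::DescriptorType::eInlineUniformBlock;
  //   write.descriptorCount = sizeof( inlineData );  // byte count, not element count
  //   write.pNext           = &inlineWrite;
  //   device.updateDescriptorSets( 1, &write, 0, nullptr );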
// wrapper struct for struct VkPhysicalDeviceInlineUniformBlockProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceInlineUniformBlockProperties.html
struct PhysicalDeviceInlineUniformBlockProperties
{
using NativeType = VkPhysicalDeviceInlineUniformBlockProperties;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceInlineUniformBlockProperties;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockProperties(uint32_t maxInlineUniformBlockSize_ = {}, uint32_t maxPerStageDescriptorInlineUniformBlocks_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ = {}, uint32_t maxDescriptorSetInlineUniformBlocks_ = {}, uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, maxInlineUniformBlockSize{ maxInlineUniformBlockSize_ }, maxPerStageDescriptorInlineUniformBlocks{ maxPerStageDescriptorInlineUniformBlocks_ }, maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks{ maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ }, maxDescriptorSetInlineUniformBlocks{ maxDescriptorSetInlineUniformBlocks_ }, maxDescriptorSetUpdateAfterBindInlineUniformBlocks{ maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockProperties( PhysicalDeviceInlineUniformBlockProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceInlineUniformBlockProperties( VkPhysicalDeviceInlineUniformBlockProperties const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceInlineUniformBlockProperties( *reinterpret_cast<PhysicalDeviceInlineUniformBlockProperties const *>( &rhs ) )
{}
PhysicalDeviceInlineUniformBlockProperties & operator=( PhysicalDeviceInlineUniformBlockProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceInlineUniformBlockProperties & operator=( VkPhysicalDeviceInlineUniformBlockProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockProperties const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceInlineUniformBlockProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceInlineUniformBlockProperties*>( this );
}
operator VkPhysicalDeviceInlineUniformBlockProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceInlineUniformBlockProperties*>( this );
}
operator VkPhysicalDeviceInlineUniformBlockProperties const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceInlineUniformBlockProperties*>( this );
}
operator VkPhysicalDeviceInlineUniformBlockProperties *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceInlineUniformBlockProperties*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, maxInlineUniformBlockSize, maxPerStageDescriptorInlineUniformBlocks, maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks, maxDescriptorSetInlineUniformBlocks, maxDescriptorSetUpdateAfterBindInlineUniformBlocks );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceInlineUniformBlockProperties const & ) const = default;
#else
bool operator==( PhysicalDeviceInlineUniformBlockProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( maxInlineUniformBlockSize == rhs.maxInlineUniformBlockSize )
&& ( maxPerStageDescriptorInlineUniformBlocks == rhs.maxPerStageDescriptorInlineUniformBlocks )
&& ( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks == rhs.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks )
&& ( maxDescriptorSetInlineUniformBlocks == rhs.maxDescriptorSetInlineUniformBlocks )
&& ( maxDescriptorSetUpdateAfterBindInlineUniformBlocks == rhs.maxDescriptorSetUpdateAfterBindInlineUniformBlocks );
#endif
}
bool operator!=( PhysicalDeviceInlineUniformBlockProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInlineUniformBlockProperties;
void * pNext = {};
uint32_t maxInlineUniformBlockSize = {};
uint32_t maxPerStageDescriptorInlineUniformBlocks = {};
uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks = {};
uint32_t maxDescriptorSetInlineUniformBlocks = {};
uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceInlineUniformBlockProperties>
{
using Type = PhysicalDeviceInlineUniformBlockProperties;
};
using PhysicalDeviceInlineUniformBlockPropertiesEXT = PhysicalDeviceInlineUniformBlockProperties;
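  // Usage sketch: these limits bound how much data an inline uniform block binding
  // may carry; query them before sizing bindings. Assumes a valid vk::PhysicalDevice
  // `physicalDevice`; illustrative only.
  //   vk::PhysicalDeviceInlineUniformBlockProperties inlineProps{};
  //   vk::PhysicalDeviceProperties2                  props2{};
  //   props2.pNext = &inlineProps;
  //   physicalDevice.getProperties2( &props2 );
  //   // Any eInlineUniformBlock binding must stay within
  //   // inlineProps.maxInlineUniformBlockSize bytes.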
// wrapper struct for struct VkPhysicalDeviceInvocationMaskFeaturesHUAWEI, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceInvocationMaskFeaturesHUAWEI.html
struct PhysicalDeviceInvocationMaskFeaturesHUAWEI
{
using NativeType = VkPhysicalDeviceInvocationMaskFeaturesHUAWEI;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceInvocationMaskFeaturesHUAWEI;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceInvocationMaskFeaturesHUAWEI(VULKAN_HPP_NAMESPACE::Bool32 invocationMask_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, invocationMask{ invocationMask_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceInvocationMaskFeaturesHUAWEI( PhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceInvocationMaskFeaturesHUAWEI( VkPhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceInvocationMaskFeaturesHUAWEI( *reinterpret_cast<PhysicalDeviceInvocationMaskFeaturesHUAWEI const *>( &rhs ) )
{}
PhysicalDeviceInvocationMaskFeaturesHUAWEI & operator=( PhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceInvocationMaskFeaturesHUAWEI & operator=( VkPhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceInvocationMaskFeaturesHUAWEI const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInvocationMaskFeaturesHUAWEI & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInvocationMaskFeaturesHUAWEI & setInvocationMask( VULKAN_HPP_NAMESPACE::Bool32 invocationMask_ ) VULKAN_HPP_NOEXCEPT
{
invocationMask = invocationMask_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceInvocationMaskFeaturesHUAWEI const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceInvocationMaskFeaturesHUAWEI*>( this );
}
operator VkPhysicalDeviceInvocationMaskFeaturesHUAWEI &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceInvocationMaskFeaturesHUAWEI*>( this );
}
operator VkPhysicalDeviceInvocationMaskFeaturesHUAWEI const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceInvocationMaskFeaturesHUAWEI*>( this );
}
operator VkPhysicalDeviceInvocationMaskFeaturesHUAWEI *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceInvocationMaskFeaturesHUAWEI*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, invocationMask );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceInvocationMaskFeaturesHUAWEI const & ) const = default;
#else
bool operator==( PhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( invocationMask == rhs.invocationMask );
#endif
}
bool operator!=( PhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInvocationMaskFeaturesHUAWEI;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 invocationMask = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceInvocationMaskFeaturesHUAWEI>
{
using Type = PhysicalDeviceInvocationMaskFeaturesHUAWEI;
};
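  // Usage sketch: with invocationMask enabled (VK_HUAWEI_invocation_mask), a mask
  // image view is bound before recording ray tracing work; the extension expects the
  // view to be in vk::ImageLayout::eGeneral. `commandBuffer` and `maskView` are
  // hypothetical names; illustrative only.
  //   commandBuffer.bindInvocationMaskHUAWEI( maskView, vk::ImageLayout::eGeneral );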
// wrapper struct for struct VkPhysicalDeviceLayeredApiPropertiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceLayeredApiPropertiesKHR.html
struct PhysicalDeviceLayeredApiPropertiesKHR
{
using NativeType = VkPhysicalDeviceLayeredApiPropertiesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLayeredApiPropertiesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLayeredApiPropertiesKHR(uint32_t vendorID_ = {}, uint32_t deviceID_ = {}, VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiKHR layeredAPI_ = VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiKHR::eVulkan, std::array<char,VK_MAX_PHYSICAL_DEVICE_NAME_SIZE> const & deviceName_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, vendorID{ vendorID_ }, deviceID{ deviceID_ }, layeredAPI{ layeredAPI_ }, deviceName{ deviceName_ }
{}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLayeredApiPropertiesKHR( PhysicalDeviceLayeredApiPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceLayeredApiPropertiesKHR( VkPhysicalDeviceLayeredApiPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceLayeredApiPropertiesKHR( *reinterpret_cast<PhysicalDeviceLayeredApiPropertiesKHR const *>( &rhs ) )
{}
PhysicalDeviceLayeredApiPropertiesKHR & operator=( PhysicalDeviceLayeredApiPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceLayeredApiPropertiesKHR & operator=( VkPhysicalDeviceLayeredApiPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiPropertiesKHR const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceLayeredApiPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceLayeredApiPropertiesKHR*>( this );
}
operator VkPhysicalDeviceLayeredApiPropertiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceLayeredApiPropertiesKHR*>( this );
}
operator VkPhysicalDeviceLayeredApiPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceLayeredApiPropertiesKHR*>( this );
}
operator VkPhysicalDeviceLayeredApiPropertiesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceLayeredApiPropertiesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiKHR const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_PHYSICAL_DEVICE_NAME_SIZE> const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, vendorID, deviceID, layeredAPI, deviceName );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceLayeredApiPropertiesKHR const & ) const = default;
#else
bool operator==( PhysicalDeviceLayeredApiPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( vendorID == rhs.vendorID )
&& ( deviceID == rhs.deviceID )
&& ( layeredAPI == rhs.layeredAPI )
&& ( deviceName == rhs.deviceName );
#endif
}
bool operator!=( PhysicalDeviceLayeredApiPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLayeredApiPropertiesKHR;
void * pNext = {};
uint32_t vendorID = {};
uint32_t deviceID = {};
VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiKHR layeredAPI = VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiKHR::eVulkan;
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_PHYSICAL_DEVICE_NAME_SIZE> deviceName = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceLayeredApiPropertiesKHR>
{
using Type = PhysicalDeviceLayeredApiPropertiesKHR;
};
// wrapper struct for struct VkPhysicalDeviceLayeredApiPropertiesListKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceLayeredApiPropertiesListKHR.html
struct PhysicalDeviceLayeredApiPropertiesListKHR
{
using NativeType = VkPhysicalDeviceLayeredApiPropertiesListKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLayeredApiPropertiesListKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLayeredApiPropertiesListKHR(uint32_t layeredApiCount_ = {}, VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiPropertiesKHR * pLayeredApis_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, layeredApiCount{ layeredApiCount_ }, pLayeredApis{ pLayeredApis_ }
{}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLayeredApiPropertiesListKHR( PhysicalDeviceLayeredApiPropertiesListKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceLayeredApiPropertiesListKHR( VkPhysicalDeviceLayeredApiPropertiesListKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceLayeredApiPropertiesListKHR( *reinterpret_cast<PhysicalDeviceLayeredApiPropertiesListKHR const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PhysicalDeviceLayeredApiPropertiesListKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiPropertiesKHR> const & layeredApis_, void * pNext_ = nullptr )
: pNext( pNext_ ), layeredApiCount( static_cast<uint32_t>( layeredApis_.size() ) ), pLayeredApis( layeredApis_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
PhysicalDeviceLayeredApiPropertiesListKHR & operator=( PhysicalDeviceLayeredApiPropertiesListKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceLayeredApiPropertiesListKHR & operator=( VkPhysicalDeviceLayeredApiPropertiesListKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiPropertiesListKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLayeredApiPropertiesListKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLayeredApiPropertiesListKHR & setLayeredApiCount( uint32_t layeredApiCount_ ) VULKAN_HPP_NOEXCEPT
{
layeredApiCount = layeredApiCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLayeredApiPropertiesListKHR & setPLayeredApis( VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiPropertiesKHR * pLayeredApis_ ) VULKAN_HPP_NOEXCEPT
{
pLayeredApis = pLayeredApis_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PhysicalDeviceLayeredApiPropertiesListKHR & setLayeredApis( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiPropertiesKHR> const & layeredApis_ ) VULKAN_HPP_NOEXCEPT
{
layeredApiCount = static_cast<uint32_t>( layeredApis_.size() );
pLayeredApis = layeredApis_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceLayeredApiPropertiesListKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceLayeredApiPropertiesListKHR*>( this );
}
operator VkPhysicalDeviceLayeredApiPropertiesListKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceLayeredApiPropertiesListKHR*>( this );
}
operator VkPhysicalDeviceLayeredApiPropertiesListKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceLayeredApiPropertiesListKHR*>( this );
}
operator VkPhysicalDeviceLayeredApiPropertiesListKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceLayeredApiPropertiesListKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiPropertiesKHR * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, layeredApiCount, pLayeredApis );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceLayeredApiPropertiesListKHR const & ) const = default;
#else
bool operator==( PhysicalDeviceLayeredApiPropertiesListKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( layeredApiCount == rhs.layeredApiCount )
&& ( pLayeredApis == rhs.pLayeredApis );
#endif
}
bool operator!=( PhysicalDeviceLayeredApiPropertiesListKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLayeredApiPropertiesListKHR;
void * pNext = {};
uint32_t layeredApiCount = {};
VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiPropertiesKHR * pLayeredApis = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceLayeredApiPropertiesListKHR>
{
using Type = PhysicalDeviceLayeredApiPropertiesListKHR;
};
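  // Usage sketch: the list follows the usual two-call enumeration pattern. With
  // pLayeredApis left null the implementation writes the available count; the caller
  // then provides storage and queries again. Assumes a valid vk::PhysicalDevice
  // `physicalDevice`; illustrative only.
  //   vk::PhysicalDeviceLayeredApiPropertiesListKHR apiList{};  // pLayeredApis == nullptr
  //   vk::PhysicalDeviceProperties2 props2{};
  //   props2.pNext = &apiList;
  //   physicalDevice.getProperties2( &props2 );  // apiList.layeredApiCount is now set
  //   std::vector<vk::PhysicalDeviceLayeredApiPropertiesKHR> apis( apiList.layeredApiCount );
  //   apiList.pLayeredApis = apis.data();
  //   physicalDevice.getProperties2( &props2 );  // fills the array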
// wrapper struct for struct VkPhysicalDeviceLimits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceLimits.html
struct PhysicalDeviceLimits
{
using NativeType = VkPhysicalDeviceLimits;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLimits(uint32_t maxImageDimension1D_ = {}, uint32_t maxImageDimension2D_ = {}, uint32_t maxImageDimension3D_ = {}, uint32_t maxImageDimensionCube_ = {}, uint32_t maxImageArrayLayers_ = {}, uint32_t maxTexelBufferElements_ = {}, uint32_t maxUniformBufferRange_ = {}, uint32_t maxStorageBufferRange_ = {}, uint32_t maxPushConstantsSize_ = {}, uint32_t maxMemoryAllocationCount_ = {}, uint32_t maxSamplerAllocationCount_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize bufferImageGranularity_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize sparseAddressSpaceSize_ = {}, uint32_t maxBoundDescriptorSets_ = {}, uint32_t maxPerStageDescriptorSamplers_ = {}, uint32_t maxPerStageDescriptorUniformBuffers_ = {}, uint32_t maxPerStageDescriptorStorageBuffers_ = {}, uint32_t maxPerStageDescriptorSampledImages_ = {}, uint32_t maxPerStageDescriptorStorageImages_ = {}, uint32_t maxPerStageDescriptorInputAttachments_ = {}, uint32_t maxPerStageResources_ = {}, uint32_t maxDescriptorSetSamplers_ = {}, uint32_t maxDescriptorSetUniformBuffers_ = {}, uint32_t maxDescriptorSetUniformBuffersDynamic_ = {}, uint32_t maxDescriptorSetStorageBuffers_ = {}, uint32_t maxDescriptorSetStorageBuffersDynamic_ = {}, uint32_t maxDescriptorSetSampledImages_ = {}, uint32_t maxDescriptorSetStorageImages_ = {}, uint32_t maxDescriptorSetInputAttachments_ = {}, uint32_t maxVertexInputAttributes_ = {}, uint32_t maxVertexInputBindings_ = {}, uint32_t maxVertexInputAttributeOffset_ = {}, uint32_t maxVertexInputBindingStride_ = {}, uint32_t maxVertexOutputComponents_ = {}, uint32_t maxTessellationGenerationLevel_ = {}, uint32_t maxTessellationPatchSize_ = {}, uint32_t maxTessellationControlPerVertexInputComponents_ = {}, uint32_t maxTessellationControlPerVertexOutputComponents_ = {}, uint32_t maxTessellationControlPerPatchOutputComponents_ = {}, uint32_t maxTessellationControlTotalOutputComponents_ = {}, uint32_t maxTessellationEvaluationInputComponents_ = {}, uint32_t maxTessellationEvaluationOutputComponents_ = {}, uint32_t maxGeometryShaderInvocations_ = {}, uint32_t maxGeometryInputComponents_ = {}, uint32_t maxGeometryOutputComponents_ = {}, uint32_t maxGeometryOutputVertices_ = {}, uint32_t maxGeometryTotalOutputComponents_ = {}, uint32_t maxFragmentInputComponents_ = {}, uint32_t maxFragmentOutputAttachments_ = {}, uint32_t maxFragmentDualSrcAttachments_ = {}, uint32_t maxFragmentCombinedOutputResources_ = {}, uint32_t maxComputeSharedMemorySize_ = {}, std::array<uint32_t,3> const & maxComputeWorkGroupCount_ = {}, uint32_t maxComputeWorkGroupInvocations_ = {}, std::array<uint32_t,3> const & maxComputeWorkGroupSize_ = {}, uint32_t subPixelPrecisionBits_ = {}, uint32_t subTexelPrecisionBits_ = {}, uint32_t mipmapPrecisionBits_ = {}, uint32_t maxDrawIndexedIndexValue_ = {}, uint32_t maxDrawIndirectCount_ = {}, float maxSamplerLodBias_ = {}, float maxSamplerAnisotropy_ = {}, uint32_t maxViewports_ = {}, std::array<uint32_t,2> const & maxViewportDimensions_ = {}, std::array<float,2> const & viewportBoundsRange_ = {}, uint32_t viewportSubPixelBits_ = {}, size_t minMemoryMapAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize minTexelBufferOffsetAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize minUniformBufferOffsetAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize minStorageBufferOffsetAlignment_ = {}, int32_t minTexelOffset_ = {}, uint32_t maxTexelOffset_ = {}, int32_t minTexelGatherOffset_ = {}, uint32_t maxTexelGatherOffset_ = {}, float minInterpolationOffset_ = {}, float maxInterpolationOffset_ = {}, uint32_t subPixelInterpolationOffsetBits_ = {}, uint32_t maxFramebufferWidth_ = {}, uint32_t maxFramebufferHeight_ = {}, uint32_t maxFramebufferLayers_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferColorSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferDepthSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferStencilSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferNoAttachmentsSampleCounts_ = {}, uint32_t maxColorAttachments_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageColorSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageIntegerSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageDepthSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageStencilSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags storageImageSampleCounts_ = {}, uint32_t maxSampleMaskWords_ = {}, VULKAN_HPP_NAMESPACE::Bool32 timestampComputeAndGraphics_ = {}, float timestampPeriod_ = {}, uint32_t maxClipDistances_ = {}, uint32_t maxCullDistances_ = {}, uint32_t maxCombinedClipAndCullDistances_ = {}, uint32_t discreteQueuePriorities_ = {}, std::array<float,2> const & pointSizeRange_ = {}, std::array<float,2> const & lineWidthRange_ = {}, float pointSizeGranularity_ = {}, float lineWidthGranularity_ = {}, VULKAN_HPP_NAMESPACE::Bool32 strictLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 standardSampleLocations_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyOffsetAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyRowPitchAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize nonCoherentAtomSize_ = {}) VULKAN_HPP_NOEXCEPT
: maxImageDimension1D{ maxImageDimension1D_ }, maxImageDimension2D{ maxImageDimension2D_ }, maxImageDimension3D{ maxImageDimension3D_ }, maxImageDimensionCube{ maxImageDimensionCube_ }, maxImageArrayLayers{ maxImageArrayLayers_ }, maxTexelBufferElements{ maxTexelBufferElements_ }, maxUniformBufferRange{ maxUniformBufferRange_ }, maxStorageBufferRange{ maxStorageBufferRange_ }, maxPushConstantsSize{ maxPushConstantsSize_ }, maxMemoryAllocationCount{ maxMemoryAllocationCount_ }, maxSamplerAllocationCount{ maxSamplerAllocationCount_ }, bufferImageGranularity{ bufferImageGranularity_ }, sparseAddressSpaceSize{ sparseAddressSpaceSize_ }, maxBoundDescriptorSets{ maxBoundDescriptorSets_ }, maxPerStageDescriptorSamplers{ maxPerStageDescriptorSamplers_ }, maxPerStageDescriptorUniformBuffers{ maxPerStageDescriptorUniformBuffers_ }, maxPerStageDescriptorStorageBuffers{ maxPerStageDescriptorStorageBuffers_ }, maxPerStageDescriptorSampledImages{ maxPerStageDescriptorSampledImages_ }, maxPerStageDescriptorStorageImages{ maxPerStageDescriptorStorageImages_ }, maxPerStageDescriptorInputAttachments{ maxPerStageDescriptorInputAttachments_ }, maxPerStageResources{ maxPerStageResources_ }, maxDescriptorSetSamplers{ maxDescriptorSetSamplers_ }, maxDescriptorSetUniformBuffers{ maxDescriptorSetUniformBuffers_ }, maxDescriptorSetUniformBuffersDynamic{ maxDescriptorSetUniformBuffersDynamic_ }, maxDescriptorSetStorageBuffers{ maxDescriptorSetStorageBuffers_ }, maxDescriptorSetStorageBuffersDynamic{ maxDescriptorSetStorageBuffersDynamic_ }, maxDescriptorSetSampledImages{ maxDescriptorSetSampledImages_ }, maxDescriptorSetStorageImages{ maxDescriptorSetStorageImages_ }, maxDescriptorSetInputAttachments{ maxDescriptorSetInputAttachments_ }, maxVertexInputAttributes{ maxVertexInputAttributes_ }, maxVertexInputBindings{ maxVertexInputBindings_ }, maxVertexInputAttributeOffset{ maxVertexInputAttributeOffset_ }, maxVertexInputBindingStride{ maxVertexInputBindingStride_ }, maxVertexOutputComponents{ maxVertexOutputComponents_ }, maxTessellationGenerationLevel{ maxTessellationGenerationLevel_ }, maxTessellationPatchSize{ maxTessellationPatchSize_ }, maxTessellationControlPerVertexInputComponents{ maxTessellationControlPerVertexInputComponents_ }, maxTessellationControlPerVertexOutputComponents{ maxTessellationControlPerVertexOutputComponents_ }, maxTessellationControlPerPatchOutputComponents{ maxTessellationControlPerPatchOutputComponents_ }, maxTessellationControlTotalOutputComponents{ maxTessellationControlTotalOutputComponents_ }, maxTessellationEvaluationInputComponents{ maxTessellationEvaluationInputComponents_ }, maxTessellationEvaluationOutputComponents{ maxTessellationEvaluationOutputComponents_ }, maxGeometryShaderInvocations{ maxGeometryShaderInvocations_ }, maxGeometryInputComponents{ maxGeometryInputComponents_ }, maxGeometryOutputComponents{ maxGeometryOutputComponents_ }, maxGeometryOutputVertices{ maxGeometryOutputVertices_ }, maxGeometryTotalOutputComponents{ maxGeometryTotalOutputComponents_ }, maxFragmentInputComponents{ maxFragmentInputComponents_ }, maxFragmentOutputAttachments{ maxFragmentOutputAttachments_ }, maxFragmentDualSrcAttachments{ maxFragmentDualSrcAttachments_ }, maxFragmentCombinedOutputResources{ maxFragmentCombinedOutputResources_ }, maxComputeSharedMemorySize{ maxComputeSharedMemorySize_ }, maxComputeWorkGroupCount{ maxComputeWorkGroupCount_ }, maxComputeWorkGroupInvocations{ maxComputeWorkGroupInvocations_ }, maxComputeWorkGroupSize{ maxComputeWorkGroupSize_ }, subPixelPrecisionBits{ subPixelPrecisionBits_ }, subTexelPrecisionBits{ subTexelPrecisionBits_ }, mipmapPrecisionBits{ mipmapPrecisionBits_ }, maxDrawIndexedIndexValue{ maxDrawIndexedIndexValue_ }, maxDrawIndirectCount{ maxDrawIndirectCount_ }, maxSamplerLodBias{ maxSamplerLodBias_ }, maxSamplerAnisotropy{ maxSamplerAnisotropy_ }, maxViewports{ maxViewports_ }, maxViewportDimensions{ maxViewportDimensions_ }, viewportBoundsRange{ viewportBoundsRange_ }, viewportSubPixelBits{ viewportSubPixelBits_ }, minMemoryMapAlignment{ minMemoryMapAlignment_ }, minTexelBufferOffsetAlignment{ minTexelBufferOffsetAlignment_ }, minUniformBufferOffsetAlignment{ minUniformBufferOffsetAlignment_ }, minStorageBufferOffsetAlignment{ minStorageBufferOffsetAlignment_ }, minTexelOffset{ minTexelOffset_ }, maxTexelOffset{ maxTexelOffset_ }, minTexelGatherOffset{ minTexelGatherOffset_ }, maxTexelGatherOffset{ maxTexelGatherOffset_ }, minInterpolationOffset{ minInterpolationOffset_ }, maxInterpolationOffset{ maxInterpolationOffset_ }, subPixelInterpolationOffsetBits{ subPixelInterpolationOffsetBits_ }, maxFramebufferWidth{ maxFramebufferWidth_ }, maxFramebufferHeight{ maxFramebufferHeight_ }, maxFramebufferLayers{ maxFramebufferLayers_ }, framebufferColorSampleCounts{ framebufferColorSampleCounts_ }, framebufferDepthSampleCounts{ framebufferDepthSampleCounts_ }, framebufferStencilSampleCounts{ framebufferStencilSampleCounts_ }, framebufferNoAttachmentsSampleCounts{ framebufferNoAttachmentsSampleCounts_ }, maxColorAttachments{ maxColorAttachments_ }, sampledImageColorSampleCounts{ sampledImageColorSampleCounts_ }, sampledImageIntegerSampleCounts{ sampledImageIntegerSampleCounts_ }, sampledImageDepthSampleCounts{ sampledImageDepthSampleCounts_ }, sampledImageStencilSampleCounts{ sampledImageStencilSampleCounts_ }, storageImageSampleCounts{ storageImageSampleCounts_ }, maxSampleMaskWords{ maxSampleMaskWords_ }, timestampComputeAndGraphics{ timestampComputeAndGraphics_ }, timestampPeriod{ timestampPeriod_ }, maxClipDistances{ maxClipDistances_ }, maxCullDistances{ maxCullDistances_ }, maxCombinedClipAndCullDistances{ maxCombinedClipAndCullDistances_ }, discreteQueuePriorities{ discreteQueuePriorities_ }, pointSizeRange{ pointSizeRange_ }, lineWidthRange{ lineWidthRange_ }, pointSizeGranularity{ pointSizeGranularity_ }, lineWidthGranularity{ lineWidthGranularity_ }, strictLines{ strictLines_ }, standardSampleLocations{ standardSampleLocations_ }, optimalBufferCopyOffsetAlignment{ optimalBufferCopyOffsetAlignment_ }, optimalBufferCopyRowPitchAlignment{ optimalBufferCopyRowPitchAlignment_ }, nonCoherentAtomSize{ nonCoherentAtomSize_ }
{}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLimits( PhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceLimits( VkPhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceLimits( *reinterpret_cast<PhysicalDeviceLimits const *>( &rhs ) )
{}
PhysicalDeviceLimits & operator=( PhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceLimits & operator=( VkPhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceLimits const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceLimits*>( this );
}
operator VkPhysicalDeviceLimits &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceLimits*>( this );
}
operator VkPhysicalDeviceLimits const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceLimits*>( this );
}
operator VkPhysicalDeviceLimits *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceLimits*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, float const &, float const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 2> const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 2> const &, uint32_t const &, size_t const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, int32_t const &, uint32_t const &, int32_t const &, uint32_t const &, float const &, float const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::SampleCountFlags const &, VULKAN_HPP_NAMESPACE::SampleCountFlags const &, VULKAN_HPP_NAMESPACE::SampleCountFlags const &, VULKAN_HPP_NAMESPACE::SampleCountFlags const &, uint32_t const &, VULKAN_HPP_NAMESPACE::SampleCountFlags const &, VULKAN_HPP_NAMESPACE::SampleCountFlags const &, VULKAN_HPP_NAMESPACE::SampleCountFlags const &, VULKAN_HPP_NAMESPACE::SampleCountFlags const &, VULKAN_HPP_NAMESPACE::SampleCountFlags const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &, float const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 2> const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 2> const &, float const &, float const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( maxImageDimension1D, maxImageDimension2D, maxImageDimension3D, maxImageDimensionCube, maxImageArrayLayers, maxTexelBufferElements, maxUniformBufferRange, maxStorageBufferRange, maxPushConstantsSize, maxMemoryAllocationCount, maxSamplerAllocationCount, bufferImageGranularity, sparseAddressSpaceSize, maxBoundDescriptorSets, maxPerStageDescriptorSamplers, maxPerStageDescriptorUniformBuffers, maxPerStageDescriptorStorageBuffers, maxPerStageDescriptorSampledImages, maxPerStageDescriptorStorageImages, maxPerStageDescriptorInputAttachments, maxPerStageResources, maxDescriptorSetSamplers, maxDescriptorSetUniformBuffers, maxDescriptorSetUniformBuffersDynamic, maxDescriptorSetStorageBuffers, maxDescriptorSetStorageBuffersDynamic, maxDescriptorSetSampledImages, maxDescriptorSetStorageImages, maxDescriptorSetInputAttachments, maxVertexInputAttributes, maxVertexInputBindings, maxVertexInputAttributeOffset, maxVertexInputBindingStride, maxVertexOutputComponents, maxTessellationGenerationLevel, maxTessellationPatchSize, maxTessellationControlPerVertexInputComponents, maxTessellationControlPerVertexOutputComponents, maxTessellationControlPerPatchOutputComponents, maxTessellationControlTotalOutputComponents, maxTessellationEvaluationInputComponents, maxTessellationEvaluationOutputComponents, maxGeometryShaderInvocations, maxGeometryInputComponents, maxGeometryOutputComponents, maxGeometryOutputVertices, maxGeometryTotalOutputComponents, maxFragmentInputComponents, maxFragmentOutputAttachments, maxFragmentDualSrcAttachments, maxFragmentCombinedOutputResources, maxComputeSharedMemorySize, maxComputeWorkGroupCount, maxComputeWorkGroupInvocations, maxComputeWorkGroupSize, subPixelPrecisionBits, subTexelPrecisionBits, mipmapPrecisionBits, maxDrawIndexedIndexValue, maxDrawIndirectCount, maxSamplerLodBias, maxSamplerAnisotropy, maxViewports, maxViewportDimensions, viewportBoundsRange, viewportSubPixelBits, minMemoryMapAlignment, minTexelBufferOffsetAlignment, minUniformBufferOffsetAlignment, minStorageBufferOffsetAlignment, minTexelOffset, maxTexelOffset, minTexelGatherOffset, maxTexelGatherOffset, minInterpolationOffset, maxInterpolationOffset, subPixelInterpolationOffsetBits, maxFramebufferWidth, maxFramebufferHeight, maxFramebufferLayers, framebufferColorSampleCounts, framebufferDepthSampleCounts, framebufferStencilSampleCounts, framebufferNoAttachmentsSampleCounts, maxColorAttachments, sampledImageColorSampleCounts, sampledImageIntegerSampleCounts, sampledImageDepthSampleCounts, sampledImageStencilSampleCounts, storageImageSampleCounts, maxSampleMaskWords, timestampComputeAndGraphics, timestampPeriod, maxClipDistances, maxCullDistances, maxCombinedClipAndCullDistances, discreteQueuePriorities, pointSizeRange, lineWidthRange, pointSizeGranularity, lineWidthGranularity, strictLines, standardSampleLocations, optimalBufferCopyOffsetAlignment, optimalBufferCopyRowPitchAlignment, nonCoherentAtomSize );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceLimits const & ) const = default;
#else
bool operator==( PhysicalDeviceLimits const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( maxImageDimension1D == rhs.maxImageDimension1D )
&& ( maxImageDimension2D == rhs.maxImageDimension2D )
&& ( maxImageDimension3D == rhs.maxImageDimension3D )
&& ( maxImageDimensionCube == rhs.maxImageDimensionCube )
&& ( maxImageArrayLayers == rhs.maxImageArrayLayers )
&& ( maxTexelBufferElements == rhs.maxTexelBufferElements )
&& ( maxUniformBufferRange == rhs.maxUniformBufferRange )
&& ( maxStorageBufferRange == rhs.maxStorageBufferRange )
&& ( maxPushConstantsSize == rhs.maxPushConstantsSize )
&& ( maxMemoryAllocationCount == rhs.maxMemoryAllocationCount )
&& ( maxSamplerAllocationCount == rhs.maxSamplerAllocationCount )
&& ( bufferImageGranularity == rhs.bufferImageGranularity )
&& ( sparseAddressSpaceSize == rhs.sparseAddressSpaceSize )
&& ( maxBoundDescriptorSets == rhs.maxBoundDescriptorSets )
&& ( maxPerStageDescriptorSamplers == rhs.maxPerStageDescriptorSamplers )
&& ( maxPerStageDescriptorUniformBuffers == rhs.maxPerStageDescriptorUniformBuffers )
&& ( maxPerStageDescriptorStorageBuffers == rhs.maxPerStageDescriptorStorageBuffers )
&& ( maxPerStageDescriptorSampledImages == rhs.maxPerStageDescriptorSampledImages )
&& ( maxPerStageDescriptorStorageImages == rhs.maxPerStageDescriptorStorageImages )
&& ( maxPerStageDescriptorInputAttachments == rhs.maxPerStageDescriptorInputAttachments )
&& ( maxPerStageResources == rhs.maxPerStageResources )
&& ( maxDescriptorSetSamplers == rhs.maxDescriptorSetSamplers )
&& ( maxDescriptorSetUniformBuffers == rhs.maxDescriptorSetUniformBuffers )
&& ( maxDescriptorSetUniformBuffersDynamic == rhs.maxDescriptorSetUniformBuffersDynamic )
&& ( maxDescriptorSetStorageBuffers == rhs.maxDescriptorSetStorageBuffers )
&& ( maxDescriptorSetStorageBuffersDynamic == rhs.maxDescriptorSetStorageBuffersDynamic )
&& ( maxDescriptorSetSampledImages == rhs.maxDescriptorSetSampledImages )
&& ( maxDescriptorSetStorageImages == rhs.maxDescriptorSetStorageImages )
&& ( maxDescriptorSetInputAttachments == rhs.maxDescriptorSetInputAttachments )
&& ( maxVertexInputAttributes == rhs.maxVertexInputAttributes )
&& ( maxVertexInputBindings == rhs.maxVertexInputBindings )
&& ( maxVertexInputAttributeOffset == rhs.maxVertexInputAttributeOffset )
&& ( maxVertexInputBindingStride == rhs.maxVertexInputBindingStride )
&& ( maxVertexOutputComponents == rhs.maxVertexOutputComponents )
&& ( maxTessellationGenerationLevel == rhs.maxTessellationGenerationLevel )
&& ( maxTessellationPatchSize == rhs.maxTessellationPatchSize )
&& ( maxTessellationControlPerVertexInputComponents == rhs.maxTessellationControlPerVertexInputComponents )
&& ( maxTessellationControlPerVertexOutputComponents == rhs.maxTessellationControlPerVertexOutputComponents )
&& ( maxTessellationControlPerPatchOutputComponents == rhs.maxTessellationControlPerPatchOutputComponents )
&& ( maxTessellationControlTotalOutputComponents == rhs.maxTessellationControlTotalOutputComponents )
&& ( maxTessellationEvaluationInputComponents == rhs.maxTessellationEvaluationInputComponents )
&& ( maxTessellationEvaluationOutputComponents == rhs.maxTessellationEvaluationOutputComponents )
&& ( maxGeometryShaderInvocations == rhs.maxGeometryShaderInvocations )
&& ( maxGeometryInputComponents == rhs.maxGeometryInputComponents )
&& ( maxGeometryOutputComponents == rhs.maxGeometryOutputComponents )
&& ( maxGeometryOutputVertices == rhs.maxGeometryOutputVertices )
&& ( maxGeometryTotalOutputComponents == rhs.maxGeometryTotalOutputComponents )
&& ( maxFragmentInputComponents == rhs.maxFragmentInputComponents )
&& ( maxFragmentOutputAttachments == rhs.maxFragmentOutputAttachments )
&& ( maxFragmentDualSrcAttachments == rhs.maxFragmentDualSrcAttachments )
&& ( maxFragmentCombinedOutputResources == rhs.maxFragmentCombinedOutputResources )
&& ( maxComputeSharedMemorySize == rhs.maxComputeSharedMemorySize )
&& ( maxComputeWorkGroupCount == rhs.maxComputeWorkGroupCount )
&& ( maxComputeWorkGroupInvocations == rhs.maxComputeWorkGroupInvocations )
&& ( maxComputeWorkGroupSize == rhs.maxComputeWorkGroupSize )
&& ( subPixelPrecisionBits == rhs.subPixelPrecisionBits )
&& ( subTexelPrecisionBits == rhs.subTexelPrecisionBits )
&& ( mipmapPrecisionBits == rhs.mipmapPrecisionBits )
&& ( maxDrawIndexedIndexValue == rhs.maxDrawIndexedIndexValue )
&& ( maxDrawIndirectCount == rhs.maxDrawIndirectCount )
&& ( maxSamplerLodBias == rhs.maxSamplerLodBias )
&& ( maxSamplerAnisotropy == rhs.maxSamplerAnisotropy )
&& ( maxViewports == rhs.maxViewports )
&& ( maxViewportDimensions == rhs.maxViewportDimensions )
&& ( viewportBoundsRange == rhs.viewportBoundsRange )
&& ( viewportSubPixelBits == rhs.viewportSubPixelBits )
&& ( minMemoryMapAlignment == rhs.minMemoryMapAlignment )
&& ( minTexelBufferOffsetAlignment == rhs.minTexelBufferOffsetAlignment )
&& ( minUniformBufferOffsetAlignment == rhs.minUniformBufferOffsetAlignment )
&& ( minStorageBufferOffsetAlignment == rhs.minStorageBufferOffsetAlignment )
&& ( minTexelOffset == rhs.minTexelOffset )
&& ( maxTexelOffset == rhs.maxTexelOffset )
&& ( minTexelGatherOffset == rhs.minTexelGatherOffset )
&& ( maxTexelGatherOffset == rhs.maxTexelGatherOffset )
&& ( minInterpolationOffset == rhs.minInterpolationOffset )
&& ( maxInterpolationOffset == rhs.maxInterpolationOffset )
&& ( subPixelInterpolationOffsetBits == rhs.subPixelInterpolationOffsetBits )
&& ( maxFramebufferWidth == rhs.maxFramebufferWidth )
&& ( maxFramebufferHeight == rhs.maxFramebufferHeight )
&& ( maxFramebufferLayers == rhs.maxFramebufferLayers )
&& ( framebufferColorSampleCounts == rhs.framebufferColorSampleCounts )
&& ( framebufferDepthSampleCounts == rhs.framebufferDepthSampleCounts )
&& ( framebufferStencilSampleCounts == rhs.framebufferStencilSampleCounts )
&& ( framebufferNoAttachmentsSampleCounts == rhs.framebufferNoAttachmentsSampleCounts )
&& ( maxColorAttachments == rhs.maxColorAttachments )
&& ( sampledImageColorSampleCounts == rhs.sampledImageColorSampleCounts )
&& ( sampledImageIntegerSampleCounts == rhs.sampledImageIntegerSampleCounts )
&& ( sampledImageDepthSampleCounts == rhs.sampledImageDepthSampleCounts )
&& ( sampledImageStencilSampleCounts == rhs.sampledImageStencilSampleCounts )
&& ( storageImageSampleCounts == rhs.storageImageSampleCounts )
&& ( maxSampleMaskWords == rhs.maxSampleMaskWords )
&& ( timestampComputeAndGraphics == rhs.timestampComputeAndGraphics )
&& ( timestampPeriod == rhs.timestampPeriod )
&& ( maxClipDistances == rhs.maxClipDistances )
&& ( maxCullDistances == rhs.maxCullDistances )
&& ( maxCombinedClipAndCullDistances == rhs.maxCombinedClipAndCullDistances )
&& ( discreteQueuePriorities == rhs.discreteQueuePriorities )
&& ( pointSizeRange == rhs.pointSizeRange )
&& ( lineWidthRange == rhs.lineWidthRange )
&& ( pointSizeGranularity == rhs.pointSizeGranularity )
&& ( lineWidthGranularity == rhs.lineWidthGranularity )
&& ( strictLines == rhs.strictLines )
&& ( standardSampleLocations == rhs.standardSampleLocations )
&& ( optimalBufferCopyOffsetAlignment == rhs.optimalBufferCopyOffsetAlignment )
&& ( optimalBufferCopyRowPitchAlignment == rhs.optimalBufferCopyRowPitchAlignment )
&& ( nonCoherentAtomSize == rhs.nonCoherentAtomSize );
#endif
}
bool operator!=( PhysicalDeviceLimits const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
uint32_t maxImageDimension1D = {};
uint32_t maxImageDimension2D = {};
uint32_t maxImageDimension3D = {};
uint32_t maxImageDimensionCube = {};
uint32_t maxImageArrayLayers = {};
uint32_t maxTexelBufferElements = {};
uint32_t maxUniformBufferRange = {};
uint32_t maxStorageBufferRange = {};
uint32_t maxPushConstantsSize = {};
uint32_t maxMemoryAllocationCount = {};
uint32_t maxSamplerAllocationCount = {};
VULKAN_HPP_NAMESPACE::DeviceSize bufferImageGranularity = {};
VULKAN_HPP_NAMESPACE::DeviceSize sparseAddressSpaceSize = {};
uint32_t maxBoundDescriptorSets = {};
uint32_t maxPerStageDescriptorSamplers = {};
uint32_t maxPerStageDescriptorUniformBuffers = {};
uint32_t maxPerStageDescriptorStorageBuffers = {};
uint32_t maxPerStageDescriptorSampledImages = {};
uint32_t maxPerStageDescriptorStorageImages = {};
uint32_t maxPerStageDescriptorInputAttachments = {};
uint32_t maxPerStageResources = {};
uint32_t maxDescriptorSetSamplers = {};
uint32_t maxDescriptorSetUniformBuffers = {};
uint32_t maxDescriptorSetUniformBuffersDynamic = {};
uint32_t maxDescriptorSetStorageBuffers = {};
uint32_t maxDescriptorSetStorageBuffersDynamic = {};
uint32_t maxDescriptorSetSampledImages = {};
uint32_t maxDescriptorSetStorageImages = {};
uint32_t maxDescriptorSetInputAttachments = {};
uint32_t maxVertexInputAttributes = {};
uint32_t maxVertexInputBindings = {};
uint32_t maxVertexInputAttributeOffset = {};
uint32_t maxVertexInputBindingStride = {};
uint32_t maxVertexOutputComponents = {};
uint32_t maxTessellationGenerationLevel = {};
uint32_t maxTessellationPatchSize = {};
uint32_t maxTessellationControlPerVertexInputComponents = {};
uint32_t maxTessellationControlPerVertexOutputComponents = {};
uint32_t maxTessellationControlPerPatchOutputComponents = {};
uint32_t maxTessellationControlTotalOutputComponents = {};
uint32_t maxTessellationEvaluationInputComponents = {};
uint32_t maxTessellationEvaluationOutputComponents = {};
uint32_t maxGeometryShaderInvocations = {};
uint32_t maxGeometryInputComponents = {};
uint32_t maxGeometryOutputComponents = {};
uint32_t maxGeometryOutputVertices = {};
uint32_t maxGeometryTotalOutputComponents = {};
uint32_t maxFragmentInputComponents = {};
uint32_t maxFragmentOutputAttachments = {};
uint32_t maxFragmentDualSrcAttachments = {};
uint32_t maxFragmentCombinedOutputResources = {};
uint32_t maxComputeSharedMemorySize = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> maxComputeWorkGroupCount = {};
uint32_t maxComputeWorkGroupInvocations = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> maxComputeWorkGroupSize = {};
uint32_t subPixelPrecisionBits = {};
uint32_t subTexelPrecisionBits = {};
uint32_t mipmapPrecisionBits = {};
uint32_t maxDrawIndexedIndexValue = {};
uint32_t maxDrawIndirectCount = {};
float maxSamplerLodBias = {};
float maxSamplerAnisotropy = {};
uint32_t maxViewports = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 2> maxViewportDimensions = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 2> viewportBoundsRange = {};
uint32_t viewportSubPixelBits = {};
size_t minMemoryMapAlignment = {};
VULKAN_HPP_NAMESPACE::DeviceSize minTexelBufferOffsetAlignment = {};
VULKAN_HPP_NAMESPACE::DeviceSize minUniformBufferOffsetAlignment = {};
VULKAN_HPP_NAMESPACE::DeviceSize minStorageBufferOffsetAlignment = {};
int32_t minTexelOffset = {};
uint32_t maxTexelOffset = {};
int32_t minTexelGatherOffset = {};
uint32_t maxTexelGatherOffset = {};
float minInterpolationOffset = {};
float maxInterpolationOffset = {};
uint32_t subPixelInterpolationOffsetBits = {};
uint32_t maxFramebufferWidth = {};
uint32_t maxFramebufferHeight = {};
uint32_t maxFramebufferLayers = {};
VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferColorSampleCounts = {};
VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferDepthSampleCounts = {};
VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferStencilSampleCounts = {};
VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferNoAttachmentsSampleCounts = {};
uint32_t maxColorAttachments = {};
VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageColorSampleCounts = {};
VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageIntegerSampleCounts = {};
VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageDepthSampleCounts = {};
VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageStencilSampleCounts = {};
VULKAN_HPP_NAMESPACE::SampleCountFlags storageImageSampleCounts = {};
uint32_t maxSampleMaskWords = {};
VULKAN_HPP_NAMESPACE::Bool32 timestampComputeAndGraphics = {};
float timestampPeriod = {};
uint32_t maxClipDistances = {};
uint32_t maxCullDistances = {};
uint32_t maxCombinedClipAndCullDistances = {};
uint32_t discreteQueuePriorities = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 2> pointSizeRange = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 2> lineWidthRange = {};
float pointSizeGranularity = {};
float lineWidthGranularity = {};
VULKAN_HPP_NAMESPACE::Bool32 strictLines = {};
VULKAN_HPP_NAMESPACE::Bool32 standardSampleLocations = {};
VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyOffsetAlignment = {};
VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyRowPitchAlignment = {};
VULKAN_HPP_NAMESPACE::DeviceSize nonCoherentAtomSize = {};
};
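// Usage note (illustrative sketch, not generated code): PhysicalDeviceLimits is a returned-only struct,
// typically read through a properties query; `physicalDevice` is an assumed, valid handle and `vk` the
// default VULKAN_HPP_NAMESPACE alias.
//
//   vk::PhysicalDeviceProperties const properties = physicalDevice.getProperties();
//   vk::PhysicalDeviceLimits const & limits = properties.limits;
//   // e.g. respect the required alignment when sub-allocating dynamic uniform buffers:
//   vk::DeviceSize uboAlignment = limits.minUniformBufferOffsetAlignment;
//   // fixed-size array members are exposed as ArrayWrapper1D and index like std::array:
//   uint32_t maxGroupCountX = limits.maxComputeWorkGroupCount[0];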
// wrapper struct for struct VkPhysicalDeviceSparseProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceSparseProperties.html
struct PhysicalDeviceSparseProperties
{
using NativeType = VkPhysicalDeviceSparseProperties;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseProperties(VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DBlockShape_ = {}, VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DMultisampleBlockShape_ = {}, VULKAN_HPP_NAMESPACE::Bool32 residencyStandard3DBlockShape_ = {}, VULKAN_HPP_NAMESPACE::Bool32 residencyAlignedMipSize_ = {}, VULKAN_HPP_NAMESPACE::Bool32 residencyNonResidentStrict_ = {}) VULKAN_HPP_NOEXCEPT
: residencyStandard2DBlockShape{ residencyStandard2DBlockShape_ }, residencyStandard2DMultisampleBlockShape{ residencyStandard2DMultisampleBlockShape_ }, residencyStandard3DBlockShape{ residencyStandard3DBlockShape_ }, residencyAlignedMipSize{ residencyAlignedMipSize_ }, residencyNonResidentStrict{ residencyNonResidentStrict_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseProperties( PhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceSparseProperties( VkPhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceSparseProperties( *reinterpret_cast<PhysicalDeviceSparseProperties const *>( &rhs ) )
{}
PhysicalDeviceSparseProperties & operator=( PhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceSparseProperties & operator=( VkPhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceSparseProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceSparseProperties*>( this );
}
operator VkPhysicalDeviceSparseProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceSparseProperties*>( this );
}
operator VkPhysicalDeviceSparseProperties const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceSparseProperties*>( this );
}
operator VkPhysicalDeviceSparseProperties *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceSparseProperties*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( residencyStandard2DBlockShape, residencyStandard2DMultisampleBlockShape, residencyStandard3DBlockShape, residencyAlignedMipSize, residencyNonResidentStrict );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceSparseProperties const & ) const = default;
#else
bool operator==( PhysicalDeviceSparseProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( residencyStandard2DBlockShape == rhs.residencyStandard2DBlockShape )
&& ( residencyStandard2DMultisampleBlockShape == rhs.residencyStandard2DMultisampleBlockShape )
&& ( residencyStandard3DBlockShape == rhs.residencyStandard3DBlockShape )
&& ( residencyAlignedMipSize == rhs.residencyAlignedMipSize )
&& ( residencyNonResidentStrict == rhs.residencyNonResidentStrict );
#endif
}
bool operator!=( PhysicalDeviceSparseProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DBlockShape = {};
VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DMultisampleBlockShape = {};
VULKAN_HPP_NAMESPACE::Bool32 residencyStandard3DBlockShape = {};
VULKAN_HPP_NAMESPACE::Bool32 residencyAlignedMipSize = {};
VULKAN_HPP_NAMESPACE::Bool32 residencyNonResidentStrict = {};
};
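// Usage note (illustrative sketch): the sparse-residency capabilities travel with the same properties
// query; assumes the hypothetical `physicalDevice` handle from above.
//
//   vk::PhysicalDeviceSparseProperties const sparse = physicalDevice.getProperties().sparseProperties;
//   if ( sparse.residencyNonResidentStrict )
//   {
//     // reads from unbound sparse regions are guaranteed to return zeros
//   }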
// wrapper struct for struct VkPhysicalDeviceProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceProperties.html
struct PhysicalDeviceProperties
{
using NativeType = VkPhysicalDeviceProperties;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties(uint32_t apiVersion_ = {}, uint32_t driverVersion_ = {}, uint32_t vendorID_ = {}, uint32_t deviceID_ = {}, VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType_ = VULKAN_HPP_NAMESPACE::PhysicalDeviceType::eOther, std::array<char,VK_MAX_PHYSICAL_DEVICE_NAME_SIZE> const & deviceName_ = {}, std::array<uint8_t,VK_UUID_SIZE> const & pipelineCacheUUID_ = {}, VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits limits_ = {}, VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties sparseProperties_ = {}) VULKAN_HPP_NOEXCEPT
: apiVersion{ apiVersion_ }, driverVersion{ driverVersion_ }, vendorID{ vendorID_ }, deviceID{ deviceID_ }, deviceType{ deviceType_ }, deviceName{ deviceName_ }, pipelineCacheUUID{ pipelineCacheUUID_ }, limits{ limits_ }, sparseProperties{ sparseProperties_ }
{}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties( PhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceProperties( VkPhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceProperties( *reinterpret_cast<PhysicalDeviceProperties const *>( &rhs ) )
{}
PhysicalDeviceProperties & operator=( PhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceProperties & operator=( VkPhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceProperties*>( this );
}
operator VkPhysicalDeviceProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceProperties*>( this );
}
operator VkPhysicalDeviceProperties const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceProperties*>( this );
}
operator VkPhysicalDeviceProperties *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceProperties*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::PhysicalDeviceType const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_PHYSICAL_DEVICE_NAME_SIZE> const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &, VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits const &, VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( apiVersion, driverVersion, vendorID, deviceID, deviceType, deviceName, pipelineCacheUUID, limits, sparseProperties );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
std::partial_ordering operator<=>( PhysicalDeviceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = apiVersion <=> rhs.apiVersion; cmp != 0 ) return cmp;
if ( auto cmp = driverVersion <=> rhs.driverVersion; cmp != 0 ) return cmp;
if ( auto cmp = vendorID <=> rhs.vendorID; cmp != 0 ) return cmp;
if ( auto cmp = deviceID <=> rhs.deviceID; cmp != 0 ) return cmp;
if ( auto cmp = deviceType <=> rhs.deviceType; cmp != 0 ) return cmp;
if ( auto cmp = strcmp( deviceName, rhs.deviceName ); cmp != 0 )
return ( cmp < 0 ) ? std::partial_ordering::less : std::partial_ordering::greater;
if ( auto cmp = pipelineCacheUUID <=> rhs.pipelineCacheUUID; cmp != 0 ) return cmp;
if ( auto cmp = limits <=> rhs.limits; cmp != 0 ) return cmp;
if ( auto cmp = sparseProperties <=> rhs.sparseProperties; cmp != 0 ) return cmp;
return std::partial_ordering::equivalent;
}
#endif
bool operator==( PhysicalDeviceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( apiVersion == rhs.apiVersion )
&& ( driverVersion == rhs.driverVersion )
&& ( vendorID == rhs.vendorID )
&& ( deviceID == rhs.deviceID )
&& ( deviceType == rhs.deviceType )
&& ( strcmp( deviceName, rhs.deviceName ) == 0 )
&& ( pipelineCacheUUID == rhs.pipelineCacheUUID )
&& ( limits == rhs.limits )
&& ( sparseProperties == rhs.sparseProperties );
}
bool operator!=( PhysicalDeviceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
uint32_t apiVersion = {};
uint32_t driverVersion = {};
uint32_t vendorID = {};
uint32_t deviceID = {};
VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType = VULKAN_HPP_NAMESPACE::PhysicalDeviceType::eOther;
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_PHYSICAL_DEVICE_NAME_SIZE> deviceName = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> pipelineCacheUUID = {};
VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits limits = {};
VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties sparseProperties = {};
};
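// Usage note (illustrative sketch): deviceName is an ArrayWrapper1D<char, N>, which converts to
// std::string for printing, and apiVersion is decoded with the VK_API_VERSION_* macros;
// `physicalDevice` remains an assumed handle.
//
//   vk::PhysicalDeviceProperties const properties = physicalDevice.getProperties();
//   std::string name  = properties.deviceName;                         // ArrayWrapper1D -> std::string
//   uint32_t    major = VK_API_VERSION_MAJOR( properties.apiVersion );
//   uint32_t    minor = VK_API_VERSION_MINOR( properties.apiVersion );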
// wrapper struct for struct VkPhysicalDeviceProperties2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceProperties2.html
struct PhysicalDeviceProperties2
{
using NativeType = VkPhysicalDeviceProperties2;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProperties2;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties2(VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, properties{ properties_ }
{}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties2( PhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceProperties2( VkPhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceProperties2( *reinterpret_cast<PhysicalDeviceProperties2 const *>( &rhs ) )
{}
PhysicalDeviceProperties2 & operator=( PhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceProperties2 & operator=( VkPhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceProperties2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceProperties2*>( this );
}
operator VkPhysicalDeviceProperties2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceProperties2*>( this );
}
operator VkPhysicalDeviceProperties2 const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceProperties2*>( this );
}
operator VkPhysicalDeviceProperties2 *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceProperties2*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, properties );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceProperties2 const & ) const = default;
#else
bool operator==( PhysicalDeviceProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( properties == rhs.properties );
#endif
}
bool operator!=( PhysicalDeviceProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProperties2;
void * pNext = {};
VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceProperties2>
{
using Type = PhysicalDeviceProperties2;
};
using PhysicalDeviceProperties2KHR = PhysicalDeviceProperties2;
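// Usage note (illustrative sketch): PhysicalDeviceProperties2 heads a pNext chain; vulkan.hpp can build
// and unpack the chain type-safely via StructureChain. The extension struct used here is only an example.
//
//   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
//                                              vk::PhysicalDeviceDriverProperties>();
//   vk::PhysicalDeviceProperties const & core   = chain.get<vk::PhysicalDeviceProperties2>().properties;
//   vk::DriverId                         driver = chain.get<vk::PhysicalDeviceDriverProperties>().driverID;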
// wrapper struct for struct VkPhysicalDeviceLayeredApiVulkanPropertiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceLayeredApiVulkanPropertiesKHR.html
struct PhysicalDeviceLayeredApiVulkanPropertiesKHR
{
using NativeType = VkPhysicalDeviceLayeredApiVulkanPropertiesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLayeredApiVulkanPropertiesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLayeredApiVulkanPropertiesKHR(VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, properties{ properties_ }
{}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLayeredApiVulkanPropertiesKHR( PhysicalDeviceLayeredApiVulkanPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceLayeredApiVulkanPropertiesKHR( VkPhysicalDeviceLayeredApiVulkanPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceLayeredApiVulkanPropertiesKHR( *reinterpret_cast<PhysicalDeviceLayeredApiVulkanPropertiesKHR const *>( &rhs ) )
{}
PhysicalDeviceLayeredApiVulkanPropertiesKHR & operator=( PhysicalDeviceLayeredApiVulkanPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceLayeredApiVulkanPropertiesKHR & operator=( VkPhysicalDeviceLayeredApiVulkanPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiVulkanPropertiesKHR const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceLayeredApiVulkanPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceLayeredApiVulkanPropertiesKHR*>( this );
}
operator VkPhysicalDeviceLayeredApiVulkanPropertiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceLayeredApiVulkanPropertiesKHR*>( this );
}
operator VkPhysicalDeviceLayeredApiVulkanPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceLayeredApiVulkanPropertiesKHR*>( this );
}
operator VkPhysicalDeviceLayeredApiVulkanPropertiesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceLayeredApiVulkanPropertiesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, properties );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceLayeredApiVulkanPropertiesKHR const & ) const = default;
#else
bool operator==( PhysicalDeviceLayeredApiVulkanPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( properties == rhs.properties );
#endif
}
bool operator!=( PhysicalDeviceLayeredApiVulkanPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLayeredApiVulkanPropertiesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceLayeredApiVulkanPropertiesKHR>
{
using Type = PhysicalDeviceLayeredApiVulkanPropertiesKHR;
};
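// Editor's note (hedged): under VK_KHR_maintenance7 this struct is intended to hang off the pNext of an
// individual VkPhysicalDeviceLayeredApiPropertiesKHR entry, letting a layered implementation report the
// underlying Vulkan driver's properties; see the linked registry page for the authoritative chaining rules.
//
//   vk::PhysicalDeviceLayeredApiVulkanPropertiesKHR vulkanProps{};  // filled by the query
//   vk::PhysicalDeviceLayeredApiPropertiesKHR       layeredApi{};   // one entry of the layered-API list
//   layeredApi.pNext = &vulkanProps;                                // chain before querying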
// wrapper struct for struct VkPhysicalDeviceLayeredDriverPropertiesMSFT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceLayeredDriverPropertiesMSFT.html
struct PhysicalDeviceLayeredDriverPropertiesMSFT
{
using NativeType = VkPhysicalDeviceLayeredDriverPropertiesMSFT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLayeredDriverPropertiesMSFT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceLayeredDriverPropertiesMSFT(VULKAN_HPP_NAMESPACE::LayeredDriverUnderlyingApiMSFT underlyingAPI_ = VULKAN_HPP_NAMESPACE::LayeredDriverUnderlyingApiMSFT::eNone, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, underlyingAPI{ underlyingAPI_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceLayeredDriverPropertiesMSFT( PhysicalDeviceLayeredDriverPropertiesMSFT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceLayeredDriverPropertiesMSFT( VkPhysicalDeviceLayeredDriverPropertiesMSFT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceLayeredDriverPropertiesMSFT( *reinterpret_cast<PhysicalDeviceLayeredDriverPropertiesMSFT const *>( &rhs ) )
{}
PhysicalDeviceLayeredDriverPropertiesMSFT & operator=( PhysicalDeviceLayeredDriverPropertiesMSFT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceLayeredDriverPropertiesMSFT & operator=( VkPhysicalDeviceLayeredDriverPropertiesMSFT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredDriverPropertiesMSFT const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceLayeredDriverPropertiesMSFT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceLayeredDriverPropertiesMSFT*>( this );
}
operator VkPhysicalDeviceLayeredDriverPropertiesMSFT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceLayeredDriverPropertiesMSFT*>( this );
}
operator VkPhysicalDeviceLayeredDriverPropertiesMSFT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceLayeredDriverPropertiesMSFT*>( this );
}
operator VkPhysicalDeviceLayeredDriverPropertiesMSFT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceLayeredDriverPropertiesMSFT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::LayeredDriverUnderlyingApiMSFT const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, underlyingAPI );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceLayeredDriverPropertiesMSFT const & ) const = default;
#else
bool operator==( PhysicalDeviceLayeredDriverPropertiesMSFT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( underlyingAPI == rhs.underlyingAPI );
#endif
}
bool operator!=( PhysicalDeviceLayeredDriverPropertiesMSFT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLayeredDriverPropertiesMSFT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::LayeredDriverUnderlyingApiMSFT underlyingAPI = VULKAN_HPP_NAMESPACE::LayeredDriverUnderlyingApiMSFT::eNone;
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceLayeredDriverPropertiesMSFT>
{
using Type = PhysicalDeviceLayeredDriverPropertiesMSFT;
};
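// Usage note (illustrative sketch): chaining this struct into a properties2 query reveals whether the
// driver is layered on another API, e.g. a D3D12-backed implementation; names are assumptions.
//
//   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
//                                              vk::PhysicalDeviceLayeredDriverPropertiesMSFT>();
//   bool layeredOnD3D12 = chain.get<vk::PhysicalDeviceLayeredDriverPropertiesMSFT>().underlyingAPI
//                      == vk::LayeredDriverUnderlyingApiMSFT::eD3D12;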
// wrapper struct for struct VkPhysicalDeviceLegacyDitheringFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceLegacyDitheringFeaturesEXT.html
struct PhysicalDeviceLegacyDitheringFeaturesEXT
{
using NativeType = VkPhysicalDeviceLegacyDitheringFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLegacyDitheringFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceLegacyDitheringFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 legacyDithering_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, legacyDithering{ legacyDithering_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceLegacyDitheringFeaturesEXT( PhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceLegacyDitheringFeaturesEXT( VkPhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceLegacyDitheringFeaturesEXT( *reinterpret_cast<PhysicalDeviceLegacyDitheringFeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceLegacyDitheringFeaturesEXT & operator=( PhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceLegacyDitheringFeaturesEXT & operator=( VkPhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceLegacyDitheringFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLegacyDitheringFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLegacyDitheringFeaturesEXT & setLegacyDithering( VULKAN_HPP_NAMESPACE::Bool32 legacyDithering_ ) VULKAN_HPP_NOEXCEPT
{
legacyDithering = legacyDithering_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceLegacyDitheringFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceLegacyDitheringFeaturesEXT*>( this );
}
operator VkPhysicalDeviceLegacyDitheringFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceLegacyDitheringFeaturesEXT*>( this );
}
operator VkPhysicalDeviceLegacyDitheringFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceLegacyDitheringFeaturesEXT*>( this );
}
operator VkPhysicalDeviceLegacyDitheringFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceLegacyDitheringFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, legacyDithering );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceLegacyDitheringFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( legacyDithering == rhs.legacyDithering );
#endif
}
bool operator!=( PhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLegacyDitheringFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 legacyDithering = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceLegacyDitheringFeaturesEXT>
{
using Type = PhysicalDeviceLegacyDitheringFeaturesEXT;
};
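// Usage note (illustrative sketch): feature structs like this one are chained into a features2 query to
// probe support, and into DeviceCreateInfo::pNext to enable the feature at device creation.
//
//   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
//                                            vk::PhysicalDeviceLegacyDitheringFeaturesEXT>();
//   vk::Bool32 supported = chain.get<vk::PhysicalDeviceLegacyDitheringFeaturesEXT>().legacyDithering;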
// wrapper struct for struct VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT.html
struct PhysicalDeviceLegacyVertexAttributesFeaturesEXT
{
using NativeType = VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLegacyVertexAttributesFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceLegacyVertexAttributesFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 legacyVertexAttributes_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, legacyVertexAttributes{ legacyVertexAttributes_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceLegacyVertexAttributesFeaturesEXT( PhysicalDeviceLegacyVertexAttributesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceLegacyVertexAttributesFeaturesEXT( VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceLegacyVertexAttributesFeaturesEXT( *reinterpret_cast<PhysicalDeviceLegacyVertexAttributesFeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceLegacyVertexAttributesFeaturesEXT & operator=( PhysicalDeviceLegacyVertexAttributesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceLegacyVertexAttributesFeaturesEXT & operator=( VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceLegacyVertexAttributesFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLegacyVertexAttributesFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLegacyVertexAttributesFeaturesEXT & setLegacyVertexAttributes( VULKAN_HPP_NAMESPACE::Bool32 legacyVertexAttributes_ ) VULKAN_HPP_NOEXCEPT
{
legacyVertexAttributes = legacyVertexAttributes_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT*>( this );
}
operator VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT*>( this );
}
operator VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT*>( this );
}
operator VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, legacyVertexAttributes );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceLegacyVertexAttributesFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceLegacyVertexAttributesFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( legacyVertexAttributes == rhs.legacyVertexAttributes );
#endif
}
bool operator!=( PhysicalDeviceLegacyVertexAttributesFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLegacyVertexAttributesFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 legacyVertexAttributes = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceLegacyVertexAttributesFeaturesEXT>
{
using Type = PhysicalDeviceLegacyVertexAttributesFeaturesEXT;
};
// wrapper struct for struct VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT.html
struct PhysicalDeviceLegacyVertexAttributesPropertiesEXT
{
using NativeType = VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLegacyVertexAttributesPropertiesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceLegacyVertexAttributesPropertiesEXT(VULKAN_HPP_NAMESPACE::Bool32 nativeUnalignedPerformance_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, nativeUnalignedPerformance{ nativeUnalignedPerformance_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceLegacyVertexAttributesPropertiesEXT( PhysicalDeviceLegacyVertexAttributesPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceLegacyVertexAttributesPropertiesEXT( VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceLegacyVertexAttributesPropertiesEXT( *reinterpret_cast<PhysicalDeviceLegacyVertexAttributesPropertiesEXT const *>( &rhs ) )
{}
PhysicalDeviceLegacyVertexAttributesPropertiesEXT & operator=( PhysicalDeviceLegacyVertexAttributesPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceLegacyVertexAttributesPropertiesEXT & operator=( VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceLegacyVertexAttributesPropertiesEXT const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT*>( this );
}
operator VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT*>( this );
}
operator VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT*>( this );
}
operator VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, nativeUnalignedPerformance );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceLegacyVertexAttributesPropertiesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceLegacyVertexAttributesPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( nativeUnalignedPerformance == rhs.nativeUnalignedPerformance );
#endif
}
bool operator!=( PhysicalDeviceLegacyVertexAttributesPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLegacyVertexAttributesPropertiesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 nativeUnalignedPerformance = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceLegacyVertexAttributesPropertiesEXT>
{
using Type = PhysicalDeviceLegacyVertexAttributesPropertiesEXT;
};
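// Usage sketch (editorial addition, not generated code): this EXT properties struct is filled in by
// the implementation, not the application; it is read by hooking it into the pNext chain of
// PhysicalDeviceProperties2. Here `physicalDevice` is assumed to be a valid vk::PhysicalDevice whose
// device supports VK_EXT_legacy_vertex_attributes:
//
//   VULKAN_HPP_NAMESPACE::PhysicalDeviceLegacyVertexAttributesPropertiesEXT legacyVertexProps;
//   VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 props2;
//   props2.pNext = &legacyVertexProps; // sType is pre-set by the wrapper's default member initializer
//   physicalDevice.getProperties2( &props2 );
//   bool unalignedIsFast = ( legacyVertexProps.nativeUnalignedPerformance == VK_TRUE );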
// wrapper struct for struct VkPhysicalDeviceLineRasterizationFeatures, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceLineRasterizationFeatures.html
struct PhysicalDeviceLineRasterizationFeatures
{
using NativeType = VkPhysicalDeviceLineRasterizationFeatures;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLineRasterizationFeatures;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationFeatures(VULKAN_HPP_NAMESPACE::Bool32 rectangularLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 smoothLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, rectangularLines{ rectangularLines_ }, bresenhamLines{ bresenhamLines_ }, smoothLines{ smoothLines_ }, stippledRectangularLines{ stippledRectangularLines_ }, stippledBresenhamLines{ stippledBresenhamLines_ }, stippledSmoothLines{ stippledSmoothLines_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationFeatures( PhysicalDeviceLineRasterizationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceLineRasterizationFeatures( VkPhysicalDeviceLineRasterizationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceLineRasterizationFeatures( *reinterpret_cast<PhysicalDeviceLineRasterizationFeatures const *>( &rhs ) )
{}
PhysicalDeviceLineRasterizationFeatures & operator=( PhysicalDeviceLineRasterizationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceLineRasterizationFeatures & operator=( VkPhysicalDeviceLineRasterizationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeatures const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeatures & setRectangularLines( VULKAN_HPP_NAMESPACE::Bool32 rectangularLines_ ) VULKAN_HPP_NOEXCEPT
{
rectangularLines = rectangularLines_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeatures & setBresenhamLines( VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_ ) VULKAN_HPP_NOEXCEPT
{
bresenhamLines = bresenhamLines_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeatures & setSmoothLines( VULKAN_HPP_NAMESPACE::Bool32 smoothLines_ ) VULKAN_HPP_NOEXCEPT
{
smoothLines = smoothLines_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeatures & setStippledRectangularLines( VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines_ ) VULKAN_HPP_NOEXCEPT
{
stippledRectangularLines = stippledRectangularLines_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeatures & setStippledBresenhamLines( VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines_ ) VULKAN_HPP_NOEXCEPT
{
stippledBresenhamLines = stippledBresenhamLines_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeatures & setStippledSmoothLines( VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines_ ) VULKAN_HPP_NOEXCEPT
{
stippledSmoothLines = stippledSmoothLines_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceLineRasterizationFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceLineRasterizationFeatures*>( this );
}
operator VkPhysicalDeviceLineRasterizationFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceLineRasterizationFeatures*>( this );
}
operator VkPhysicalDeviceLineRasterizationFeatures const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceLineRasterizationFeatures*>( this );
}
operator VkPhysicalDeviceLineRasterizationFeatures *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceLineRasterizationFeatures*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, rectangularLines, bresenhamLines, smoothLines, stippledRectangularLines, stippledBresenhamLines, stippledSmoothLines );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceLineRasterizationFeatures const & ) const = default;
#else
bool operator==( PhysicalDeviceLineRasterizationFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( rectangularLines == rhs.rectangularLines )
&& ( bresenhamLines == rhs.bresenhamLines )
&& ( smoothLines == rhs.smoothLines )
&& ( stippledRectangularLines == rhs.stippledRectangularLines )
&& ( stippledBresenhamLines == rhs.stippledBresenhamLines )
&& ( stippledSmoothLines == rhs.stippledSmoothLines );
#endif
}
bool operator!=( PhysicalDeviceLineRasterizationFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLineRasterizationFeatures;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 rectangularLines = {};
VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines = {};
VULKAN_HPP_NAMESPACE::Bool32 smoothLines = {};
VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines = {};
VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines = {};
VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceLineRasterizationFeatures>
{
using Type = PhysicalDeviceLineRasterizationFeatures;
};
using PhysicalDeviceLineRasterizationFeaturesEXT = PhysicalDeviceLineRasterizationFeatures;
using PhysicalDeviceLineRasterizationFeaturesKHR = PhysicalDeviceLineRasterizationFeatures;
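// Usage sketch (editorial addition): the fluent setters above make it easy to request individual
// line rasterization features at device creation. The variables `physicalDevice` and
// `queueCreateInfo` are assumed to exist, and support for the requested features is assumed to have
// been queried beforehand:
//
//   auto lineFeatures = VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeatures{}
//                         .setBresenhamLines( VK_TRUE )
//                         .setStippledBresenhamLines( VK_TRUE );
//   VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features2;
//   features2.pNext = &lineFeatures; // leave DeviceCreateInfo::pEnabledFeatures null when chaining Features2
//   auto deviceCreateInfo = VULKAN_HPP_NAMESPACE::DeviceCreateInfo{}
//                             .setQueueCreateInfos( queueCreateInfo )
//                             .setPNext( &features2 );
//   VULKAN_HPP_NAMESPACE::Device device = physicalDevice.createDevice( deviceCreateInfo );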
// wrapper struct for struct VkPhysicalDeviceLineRasterizationProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceLineRasterizationProperties.html
struct PhysicalDeviceLineRasterizationProperties
{
using NativeType = VkPhysicalDeviceLineRasterizationProperties;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLineRasterizationProperties;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationProperties(uint32_t lineSubPixelPrecisionBits_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, lineSubPixelPrecisionBits{ lineSubPixelPrecisionBits_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationProperties( PhysicalDeviceLineRasterizationProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceLineRasterizationProperties( VkPhysicalDeviceLineRasterizationProperties const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceLineRasterizationProperties( *reinterpret_cast<PhysicalDeviceLineRasterizationProperties const *>( &rhs ) )
{}
PhysicalDeviceLineRasterizationProperties & operator=( PhysicalDeviceLineRasterizationProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceLineRasterizationProperties & operator=( VkPhysicalDeviceLineRasterizationProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationProperties const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceLineRasterizationProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceLineRasterizationProperties*>( this );
}
operator VkPhysicalDeviceLineRasterizationProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceLineRasterizationProperties*>( this );
}
operator VkPhysicalDeviceLineRasterizationProperties const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceLineRasterizationProperties*>( this );
}
operator VkPhysicalDeviceLineRasterizationProperties *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceLineRasterizationProperties*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, lineSubPixelPrecisionBits );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceLineRasterizationProperties const & ) const = default;
#else
bool operator==( PhysicalDeviceLineRasterizationProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( lineSubPixelPrecisionBits == rhs.lineSubPixelPrecisionBits );
#endif
}
bool operator!=( PhysicalDeviceLineRasterizationProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLineRasterizationProperties;
void * pNext = {};
uint32_t lineSubPixelPrecisionBits = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceLineRasterizationProperties>
{
using Type = PhysicalDeviceLineRasterizationProperties;
};
using PhysicalDeviceLineRasterizationPropertiesEXT = PhysicalDeviceLineRasterizationProperties;
using PhysicalDeviceLineRasterizationPropertiesKHR = PhysicalDeviceLineRasterizationProperties;
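// Usage sketch (editorial addition): instead of wiring pNext by hand, vulkan.hpp's StructureChain
// links and validates the chain automatically. `physicalDevice` is assumed to be a valid
// vk::PhysicalDevice with line rasterization available, either as core or via the aliased
// EXT/KHR extensions above:
//
//   auto chain = physicalDevice.getProperties2<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2,
//                                              VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationProperties>();
//   uint32_t precisionBits =
//     chain.get<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationProperties>().lineSubPixelPrecisionBits;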
// wrapper struct for struct VkPhysicalDeviceLinearColorAttachmentFeaturesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceLinearColorAttachmentFeaturesNV.html
struct PhysicalDeviceLinearColorAttachmentFeaturesNV
{
using NativeType = VkPhysicalDeviceLinearColorAttachmentFeaturesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLinearColorAttachmentFeaturesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceLinearColorAttachmentFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 linearColorAttachment_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, linearColorAttachment{ linearColorAttachment_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceLinearColorAttachmentFeaturesNV( PhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceLinearColorAttachmentFeaturesNV( VkPhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceLinearColorAttachmentFeaturesNV( *reinterpret_cast<PhysicalDeviceLinearColorAttachmentFeaturesNV const *>( &rhs ) )
{}
PhysicalDeviceLinearColorAttachmentFeaturesNV & operator=( PhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceLinearColorAttachmentFeaturesNV & operator=( VkPhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceLinearColorAttachmentFeaturesNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLinearColorAttachmentFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLinearColorAttachmentFeaturesNV & setLinearColorAttachment( VULKAN_HPP_NAMESPACE::Bool32 linearColorAttachment_ ) VULKAN_HPP_NOEXCEPT
{
linearColorAttachment = linearColorAttachment_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceLinearColorAttachmentFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceLinearColorAttachmentFeaturesNV*>( this );
}
operator VkPhysicalDeviceLinearColorAttachmentFeaturesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceLinearColorAttachmentFeaturesNV*>( this );
}
operator VkPhysicalDeviceLinearColorAttachmentFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceLinearColorAttachmentFeaturesNV*>( this );
}
operator VkPhysicalDeviceLinearColorAttachmentFeaturesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceLinearColorAttachmentFeaturesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, linearColorAttachment );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceLinearColorAttachmentFeaturesNV const & ) const = default;
#else
bool operator==( PhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( linearColorAttachment == rhs.linearColorAttachment );
#endif
}
bool operator!=( PhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLinearColorAttachmentFeaturesNV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 linearColorAttachment = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceLinearColorAttachmentFeaturesNV>
{
using Type = PhysicalDeviceLinearColorAttachmentFeaturesNV;
};
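// Usage sketch (editorial addition): feature structs can be queried for support the same way
// properties are, via getFeatures2. `physicalDevice` is assumed valid and to advertise
// VK_NV_linear_color_attachment:
//
//   auto chain = physicalDevice.getFeatures2<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2,
//                                            VULKAN_HPP_NAMESPACE::PhysicalDeviceLinearColorAttachmentFeaturesNV>();
//   bool supported =
//     chain.get<VULKAN_HPP_NAMESPACE::PhysicalDeviceLinearColorAttachmentFeaturesNV>().linearColorAttachment == VK_TRUE;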
// wrapper struct for struct VkPhysicalDeviceMaintenance3Properties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMaintenance3Properties.html
struct PhysicalDeviceMaintenance3Properties
{
using NativeType = VkPhysicalDeviceMaintenance3Properties;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance3Properties;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance3Properties(uint32_t maxPerSetDescriptors_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, maxPerSetDescriptors{ maxPerSetDescriptors_ }, maxMemoryAllocationSize{ maxMemoryAllocationSize_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance3Properties( PhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceMaintenance3Properties( VkPhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceMaintenance3Properties( *reinterpret_cast<PhysicalDeviceMaintenance3Properties const *>( &rhs ) )
{}
PhysicalDeviceMaintenance3Properties & operator=( PhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceMaintenance3Properties & operator=( VkPhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceMaintenance3Properties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceMaintenance3Properties*>( this );
}
operator VkPhysicalDeviceMaintenance3Properties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceMaintenance3Properties*>( this );
}
operator VkPhysicalDeviceMaintenance3Properties const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceMaintenance3Properties*>( this );
}
operator VkPhysicalDeviceMaintenance3Properties *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceMaintenance3Properties*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, maxPerSetDescriptors, maxMemoryAllocationSize );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceMaintenance3Properties const & ) const = default;
#else
bool operator==( PhysicalDeviceMaintenance3Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( maxPerSetDescriptors == rhs.maxPerSetDescriptors )
&& ( maxMemoryAllocationSize == rhs.maxMemoryAllocationSize );
#endif
}
bool operator!=( PhysicalDeviceMaintenance3Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance3Properties;
void * pNext = {};
uint32_t maxPerSetDescriptors = {};
VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceMaintenance3Properties>
{
using Type = PhysicalDeviceMaintenance3Properties;
};
using PhysicalDeviceMaintenance3PropertiesKHR = PhysicalDeviceMaintenance3Properties;
// wrapper struct for struct VkPhysicalDeviceMaintenance4Features, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMaintenance4Features.html
struct PhysicalDeviceMaintenance4Features
{
using NativeType = VkPhysicalDeviceMaintenance4Features;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance4Features;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance4Features(VULKAN_HPP_NAMESPACE::Bool32 maintenance4_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, maintenance4{ maintenance4_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance4Features( PhysicalDeviceMaintenance4Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceMaintenance4Features( VkPhysicalDeviceMaintenance4Features const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceMaintenance4Features( *reinterpret_cast<PhysicalDeviceMaintenance4Features const *>( &rhs ) )
{}
PhysicalDeviceMaintenance4Features & operator=( PhysicalDeviceMaintenance4Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceMaintenance4Features & operator=( VkPhysicalDeviceMaintenance4Features const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Features const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance4Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance4Features & setMaintenance4( VULKAN_HPP_NAMESPACE::Bool32 maintenance4_ ) VULKAN_HPP_NOEXCEPT
{
maintenance4 = maintenance4_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceMaintenance4Features const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceMaintenance4Features*>( this );
}
operator VkPhysicalDeviceMaintenance4Features &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceMaintenance4Features*>( this );
}
operator VkPhysicalDeviceMaintenance4Features const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceMaintenance4Features*>( this );
}
operator VkPhysicalDeviceMaintenance4Features *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceMaintenance4Features*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, maintenance4 );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceMaintenance4Features const & ) const = default;
#else
bool operator==( PhysicalDeviceMaintenance4Features const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( maintenance4 == rhs.maintenance4 );
#endif
}
bool operator!=( PhysicalDeviceMaintenance4Features const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance4Features;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 maintenance4 = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceMaintenance4Features>
{
using Type = PhysicalDeviceMaintenance4Features;
};
using PhysicalDeviceMaintenance4FeaturesKHR = PhysicalDeviceMaintenance4Features;
// wrapper struct for struct VkPhysicalDeviceMaintenance4Properties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMaintenance4Properties.html
struct PhysicalDeviceMaintenance4Properties
{
using NativeType = VkPhysicalDeviceMaintenance4Properties;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance4Properties;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance4Properties(VULKAN_HPP_NAMESPACE::DeviceSize maxBufferSize_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, maxBufferSize{ maxBufferSize_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance4Properties( PhysicalDeviceMaintenance4Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceMaintenance4Properties( VkPhysicalDeviceMaintenance4Properties const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceMaintenance4Properties( *reinterpret_cast<PhysicalDeviceMaintenance4Properties const *>( &rhs ) )
{}
PhysicalDeviceMaintenance4Properties & operator=( PhysicalDeviceMaintenance4Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceMaintenance4Properties & operator=( VkPhysicalDeviceMaintenance4Properties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Properties const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceMaintenance4Properties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceMaintenance4Properties*>( this );
}
operator VkPhysicalDeviceMaintenance4Properties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceMaintenance4Properties*>( this );
}
operator VkPhysicalDeviceMaintenance4Properties const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceMaintenance4Properties*>( this );
}
operator VkPhysicalDeviceMaintenance4Properties *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceMaintenance4Properties*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, maxBufferSize );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceMaintenance4Properties const & ) const = default;
#else
bool operator==( PhysicalDeviceMaintenance4Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( maxBufferSize == rhs.maxBufferSize );
#endif
}
bool operator!=( PhysicalDeviceMaintenance4Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance4Properties;
void * pNext = {};
VULKAN_HPP_NAMESPACE::DeviceSize maxBufferSize = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceMaintenance4Properties>
{
using Type = PhysicalDeviceMaintenance4Properties;
};
using PhysicalDeviceMaintenance4PropertiesKHR = PhysicalDeviceMaintenance4Properties;
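// Usage sketch (editorial addition): maxBufferSize above is the largest buffer the implementation
// supports creating, so checking it up front avoids a failed vkCreateBuffer on very large requests.
// `maint4Props` is assumed to have been queried through one of the chains shown earlier, and
// `bufferCreateInfo` comes from the surrounding application code:
//
//   if ( bufferCreateInfo.size > maint4Props.maxBufferSize )
//   {
//     // split the resource into several buffers, or shrink the request
//   }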
// wrapper struct for struct VkPhysicalDeviceMaintenance5Features, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMaintenance5Features.html
struct PhysicalDeviceMaintenance5Features
{
using NativeType = VkPhysicalDeviceMaintenance5Features;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance5Features;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance5Features(VULKAN_HPP_NAMESPACE::Bool32 maintenance5_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, maintenance5{ maintenance5_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance5Features( PhysicalDeviceMaintenance5Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceMaintenance5Features( VkPhysicalDeviceMaintenance5Features const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceMaintenance5Features( *reinterpret_cast<PhysicalDeviceMaintenance5Features const *>( &rhs ) )
{}
PhysicalDeviceMaintenance5Features & operator=( PhysicalDeviceMaintenance5Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceMaintenance5Features & operator=( VkPhysicalDeviceMaintenance5Features const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5Features const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance5Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance5Features & setMaintenance5( VULKAN_HPP_NAMESPACE::Bool32 maintenance5_ ) VULKAN_HPP_NOEXCEPT
{
maintenance5 = maintenance5_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceMaintenance5Features const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceMaintenance5Features*>( this );
}
operator VkPhysicalDeviceMaintenance5Features &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceMaintenance5Features*>( this );
}
operator VkPhysicalDeviceMaintenance5Features const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceMaintenance5Features*>( this );
}
operator VkPhysicalDeviceMaintenance5Features *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceMaintenance5Features*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, maintenance5 );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceMaintenance5Features const & ) const = default;
#else
bool operator==( PhysicalDeviceMaintenance5Features const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( maintenance5 == rhs.maintenance5 );
#endif
}
bool operator!=( PhysicalDeviceMaintenance5Features const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance5Features;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 maintenance5 = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceMaintenance5Features>
{
using Type = PhysicalDeviceMaintenance5Features;
};
using PhysicalDeviceMaintenance5FeaturesKHR = PhysicalDeviceMaintenance5Features;
// wrapper struct for struct VkPhysicalDeviceMaintenance5Properties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMaintenance5Properties.html
struct PhysicalDeviceMaintenance5Properties
{
using NativeType = VkPhysicalDeviceMaintenance5Properties;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance5Properties;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance5Properties(VULKAN_HPP_NAMESPACE::Bool32 earlyFragmentMultisampleCoverageAfterSampleCounting_ = {}, VULKAN_HPP_NAMESPACE::Bool32 earlyFragmentSampleMaskTestBeforeSampleCounting_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthStencilSwizzleOneSupport_ = {}, VULKAN_HPP_NAMESPACE::Bool32 polygonModePointSize_ = {}, VULKAN_HPP_NAMESPACE::Bool32 nonStrictSinglePixelWideLinesUseParallelogram_ = {}, VULKAN_HPP_NAMESPACE::Bool32 nonStrictWideLinesUseParallelogram_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, earlyFragmentMultisampleCoverageAfterSampleCounting{ earlyFragmentMultisampleCoverageAfterSampleCounting_ }, earlyFragmentSampleMaskTestBeforeSampleCounting{ earlyFragmentSampleMaskTestBeforeSampleCounting_ }, depthStencilSwizzleOneSupport{ depthStencilSwizzleOneSupport_ }, polygonModePointSize{ polygonModePointSize_ }, nonStrictSinglePixelWideLinesUseParallelogram{ nonStrictSinglePixelWideLinesUseParallelogram_ }, nonStrictWideLinesUseParallelogram{ nonStrictWideLinesUseParallelogram_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance5Properties( PhysicalDeviceMaintenance5Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceMaintenance5Properties( VkPhysicalDeviceMaintenance5Properties const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceMaintenance5Properties( *reinterpret_cast<PhysicalDeviceMaintenance5Properties const *>( &rhs ) )
{}
PhysicalDeviceMaintenance5Properties & operator=( PhysicalDeviceMaintenance5Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceMaintenance5Properties & operator=( VkPhysicalDeviceMaintenance5Properties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5Properties const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceMaintenance5Properties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceMaintenance5Properties*>( this );
}
operator VkPhysicalDeviceMaintenance5Properties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceMaintenance5Properties*>( this );
}
operator VkPhysicalDeviceMaintenance5Properties const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceMaintenance5Properties*>( this );
}
operator VkPhysicalDeviceMaintenance5Properties *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceMaintenance5Properties*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, earlyFragmentMultisampleCoverageAfterSampleCounting, earlyFragmentSampleMaskTestBeforeSampleCounting, depthStencilSwizzleOneSupport, polygonModePointSize, nonStrictSinglePixelWideLinesUseParallelogram, nonStrictWideLinesUseParallelogram );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceMaintenance5Properties const & ) const = default;
#else
bool operator==( PhysicalDeviceMaintenance5Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( earlyFragmentMultisampleCoverageAfterSampleCounting == rhs.earlyFragmentMultisampleCoverageAfterSampleCounting )
&& ( earlyFragmentSampleMaskTestBeforeSampleCounting == rhs.earlyFragmentSampleMaskTestBeforeSampleCounting )
&& ( depthStencilSwizzleOneSupport == rhs.depthStencilSwizzleOneSupport )
&& ( polygonModePointSize == rhs.polygonModePointSize )
&& ( nonStrictSinglePixelWideLinesUseParallelogram == rhs.nonStrictSinglePixelWideLinesUseParallelogram )
&& ( nonStrictWideLinesUseParallelogram == rhs.nonStrictWideLinesUseParallelogram );
#endif
}
bool operator!=( PhysicalDeviceMaintenance5Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance5Properties;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 earlyFragmentMultisampleCoverageAfterSampleCounting = {};
VULKAN_HPP_NAMESPACE::Bool32 earlyFragmentSampleMaskTestBeforeSampleCounting = {};
VULKAN_HPP_NAMESPACE::Bool32 depthStencilSwizzleOneSupport = {};
VULKAN_HPP_NAMESPACE::Bool32 polygonModePointSize = {};
VULKAN_HPP_NAMESPACE::Bool32 nonStrictSinglePixelWideLinesUseParallelogram = {};
VULKAN_HPP_NAMESPACE::Bool32 nonStrictWideLinesUseParallelogram = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceMaintenance5Properties>
{
using Type = PhysicalDeviceMaintenance5Properties;
};
using PhysicalDeviceMaintenance5PropertiesKHR = PhysicalDeviceMaintenance5Properties;
// wrapper struct for struct VkPhysicalDeviceMaintenance6Features, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMaintenance6Features.html
struct PhysicalDeviceMaintenance6Features
{
using NativeType = VkPhysicalDeviceMaintenance6Features;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance6Features;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance6Features(VULKAN_HPP_NAMESPACE::Bool32 maintenance6_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, maintenance6{ maintenance6_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance6Features( PhysicalDeviceMaintenance6Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceMaintenance6Features( VkPhysicalDeviceMaintenance6Features const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceMaintenance6Features( *reinterpret_cast<PhysicalDeviceMaintenance6Features const *>( &rhs ) )
{}
PhysicalDeviceMaintenance6Features & operator=( PhysicalDeviceMaintenance6Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceMaintenance6Features & operator=( VkPhysicalDeviceMaintenance6Features const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance6Features const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance6Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance6Features & setMaintenance6( VULKAN_HPP_NAMESPACE::Bool32 maintenance6_ ) VULKAN_HPP_NOEXCEPT
{
maintenance6 = maintenance6_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceMaintenance6Features const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceMaintenance6Features*>( this );
}
operator VkPhysicalDeviceMaintenance6Features &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceMaintenance6Features*>( this );
}
operator VkPhysicalDeviceMaintenance6Features const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceMaintenance6Features*>( this );
}
operator VkPhysicalDeviceMaintenance6Features *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceMaintenance6Features*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, maintenance6 );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceMaintenance6Features const & ) const = default;
#else
bool operator==( PhysicalDeviceMaintenance6Features const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( maintenance6 == rhs.maintenance6 );
#endif
}
bool operator!=( PhysicalDeviceMaintenance6Features const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance6Features;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 maintenance6 = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceMaintenance6Features>
{
using Type = PhysicalDeviceMaintenance6Features;
};
using PhysicalDeviceMaintenance6FeaturesKHR = PhysicalDeviceMaintenance6Features;
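// Usage sketch (editorial addition): several maintenance feature structs can be linked into one
// chain by hand, either through the constructors' trailing pNext parameter as below or via the
// setPNext setters; the terminal struct keeps its default nullptr pNext. `deviceCreateInfo` is
// assumed to be prepared elsewhere:
//
//   VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance6Features maint6{ VK_TRUE };
//   VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5Features maint5{ VK_TRUE, &maint6 };
//   VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Features maint4{ VK_TRUE, &maint5 };
//   deviceCreateInfo.setPNext( &maint4 ); // chain: maintenance4 -> maintenance5 -> maintenance6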
// wrapper struct for struct VkPhysicalDeviceMaintenance6Properties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMaintenance6Properties.html
struct PhysicalDeviceMaintenance6Properties
{
using NativeType = VkPhysicalDeviceMaintenance6Properties;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance6Properties;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance6Properties(VULKAN_HPP_NAMESPACE::Bool32 blockTexelViewCompatibleMultipleLayers_ = {}, uint32_t maxCombinedImageSamplerDescriptorCount_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateClampCombinerInputs_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, blockTexelViewCompatibleMultipleLayers{ blockTexelViewCompatibleMultipleLayers_ }, maxCombinedImageSamplerDescriptorCount{ maxCombinedImageSamplerDescriptorCount_ }, fragmentShadingRateClampCombinerInputs{ fragmentShadingRateClampCombinerInputs_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance6Properties( PhysicalDeviceMaintenance6Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceMaintenance6Properties( VkPhysicalDeviceMaintenance6Properties const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceMaintenance6Properties( *reinterpret_cast<PhysicalDeviceMaintenance6Properties const *>( &rhs ) )
{}
PhysicalDeviceMaintenance6Properties & operator=( PhysicalDeviceMaintenance6Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceMaintenance6Properties & operator=( VkPhysicalDeviceMaintenance6Properties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance6Properties const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceMaintenance6Properties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceMaintenance6Properties*>( this );
}
operator VkPhysicalDeviceMaintenance6Properties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceMaintenance6Properties*>( this );
}
operator VkPhysicalDeviceMaintenance6Properties const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceMaintenance6Properties*>( this );
}
operator VkPhysicalDeviceMaintenance6Properties *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceMaintenance6Properties*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, blockTexelViewCompatibleMultipleLayers, maxCombinedImageSamplerDescriptorCount, fragmentShadingRateClampCombinerInputs );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceMaintenance6Properties const & ) const = default;
#else
bool operator==( PhysicalDeviceMaintenance6Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( blockTexelViewCompatibleMultipleLayers == rhs.blockTexelViewCompatibleMultipleLayers )
&& ( maxCombinedImageSamplerDescriptorCount == rhs.maxCombinedImageSamplerDescriptorCount )
&& ( fragmentShadingRateClampCombinerInputs == rhs.fragmentShadingRateClampCombinerInputs );
#endif
}
bool operator!=( PhysicalDeviceMaintenance6Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance6Properties;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 blockTexelViewCompatibleMultipleLayers = {};
uint32_t maxCombinedImageSamplerDescriptorCount = {};
VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateClampCombinerInputs = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceMaintenance6Properties>
{
using Type = PhysicalDeviceMaintenance6Properties;
};
using PhysicalDeviceMaintenance6PropertiesKHR = PhysicalDeviceMaintenance6Properties;
// wrapper struct for struct VkPhysicalDeviceMaintenance7FeaturesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMaintenance7FeaturesKHR.html
struct PhysicalDeviceMaintenance7FeaturesKHR
{
using NativeType = VkPhysicalDeviceMaintenance7FeaturesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance7FeaturesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance7FeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 maintenance7_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, maintenance7{ maintenance7_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance7FeaturesKHR( PhysicalDeviceMaintenance7FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceMaintenance7FeaturesKHR( VkPhysicalDeviceMaintenance7FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceMaintenance7FeaturesKHR( *reinterpret_cast<PhysicalDeviceMaintenance7FeaturesKHR const *>( &rhs ) )
{}
PhysicalDeviceMaintenance7FeaturesKHR & operator=( PhysicalDeviceMaintenance7FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceMaintenance7FeaturesKHR & operator=( VkPhysicalDeviceMaintenance7FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance7FeaturesKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance7FeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance7FeaturesKHR & setMaintenance7( VULKAN_HPP_NAMESPACE::Bool32 maintenance7_ ) VULKAN_HPP_NOEXCEPT
{
maintenance7 = maintenance7_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceMaintenance7FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceMaintenance7FeaturesKHR*>( this );
}
operator VkPhysicalDeviceMaintenance7FeaturesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceMaintenance7FeaturesKHR*>( this );
}
operator VkPhysicalDeviceMaintenance7FeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceMaintenance7FeaturesKHR*>( this );
}
operator VkPhysicalDeviceMaintenance7FeaturesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceMaintenance7FeaturesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, maintenance7 );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceMaintenance7FeaturesKHR const & ) const = default;
#else
bool operator==( PhysicalDeviceMaintenance7FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( maintenance7 == rhs.maintenance7 );
#endif
}
bool operator!=( PhysicalDeviceMaintenance7FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance7FeaturesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 maintenance7 = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceMaintenance7FeaturesKHR>
{
using Type = PhysicalDeviceMaintenance7FeaturesKHR;
};
// wrapper struct for struct VkPhysicalDeviceMaintenance7PropertiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMaintenance7PropertiesKHR.html
struct PhysicalDeviceMaintenance7PropertiesKHR
{
using NativeType = VkPhysicalDeviceMaintenance7PropertiesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance7PropertiesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance7PropertiesKHR(VULKAN_HPP_NAMESPACE::Bool32 robustFragmentShadingRateAttachmentAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilAttachmentAccess_ = {}, uint32_t maxDescriptorSetTotalUniformBuffersDynamic_ = {}, uint32_t maxDescriptorSetTotalStorageBuffersDynamic_ = {}, uint32_t maxDescriptorSetTotalBuffersDynamic_ = {}, uint32_t maxDescriptorSetUpdateAfterBindTotalUniformBuffersDynamic_ = {}, uint32_t maxDescriptorSetUpdateAfterBindTotalStorageBuffersDynamic_ = {}, uint32_t maxDescriptorSetUpdateAfterBindTotalBuffersDynamic_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, robustFragmentShadingRateAttachmentAccess{ robustFragmentShadingRateAttachmentAccess_ }, separateDepthStencilAttachmentAccess{ separateDepthStencilAttachmentAccess_ }, maxDescriptorSetTotalUniformBuffersDynamic{ maxDescriptorSetTotalUniformBuffersDynamic_ }, maxDescriptorSetTotalStorageBuffersDynamic{ maxDescriptorSetTotalStorageBuffersDynamic_ }, maxDescriptorSetTotalBuffersDynamic{ maxDescriptorSetTotalBuffersDynamic_ }, maxDescriptorSetUpdateAfterBindTotalUniformBuffersDynamic{ maxDescriptorSetUpdateAfterBindTotalUniformBuffersDynamic_ }, maxDescriptorSetUpdateAfterBindTotalStorageBuffersDynamic{ maxDescriptorSetUpdateAfterBindTotalStorageBuffersDynamic_ }, maxDescriptorSetUpdateAfterBindTotalBuffersDynamic{ maxDescriptorSetUpdateAfterBindTotalBuffersDynamic_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance7PropertiesKHR( PhysicalDeviceMaintenance7PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceMaintenance7PropertiesKHR( VkPhysicalDeviceMaintenance7PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceMaintenance7PropertiesKHR( *reinterpret_cast<PhysicalDeviceMaintenance7PropertiesKHR const *>( &rhs ) )
{}
PhysicalDeviceMaintenance7PropertiesKHR & operator=( PhysicalDeviceMaintenance7PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceMaintenance7PropertiesKHR & operator=( VkPhysicalDeviceMaintenance7PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance7PropertiesKHR const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceMaintenance7PropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceMaintenance7PropertiesKHR*>( this );
}
operator VkPhysicalDeviceMaintenance7PropertiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceMaintenance7PropertiesKHR*>( this );
}
operator VkPhysicalDeviceMaintenance7PropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceMaintenance7PropertiesKHR*>( this );
}
operator VkPhysicalDeviceMaintenance7PropertiesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceMaintenance7PropertiesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, robustFragmentShadingRateAttachmentAccess, separateDepthStencilAttachmentAccess, maxDescriptorSetTotalUniformBuffersDynamic, maxDescriptorSetTotalStorageBuffersDynamic, maxDescriptorSetTotalBuffersDynamic, maxDescriptorSetUpdateAfterBindTotalUniformBuffersDynamic, maxDescriptorSetUpdateAfterBindTotalStorageBuffersDynamic, maxDescriptorSetUpdateAfterBindTotalBuffersDynamic );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceMaintenance7PropertiesKHR const & ) const = default;
#else
bool operator==( PhysicalDeviceMaintenance7PropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( robustFragmentShadingRateAttachmentAccess == rhs.robustFragmentShadingRateAttachmentAccess )
&& ( separateDepthStencilAttachmentAccess == rhs.separateDepthStencilAttachmentAccess )
&& ( maxDescriptorSetTotalUniformBuffersDynamic == rhs.maxDescriptorSetTotalUniformBuffersDynamic )
&& ( maxDescriptorSetTotalStorageBuffersDynamic == rhs.maxDescriptorSetTotalStorageBuffersDynamic )
&& ( maxDescriptorSetTotalBuffersDynamic == rhs.maxDescriptorSetTotalBuffersDynamic )
&& ( maxDescriptorSetUpdateAfterBindTotalUniformBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindTotalUniformBuffersDynamic )
&& ( maxDescriptorSetUpdateAfterBindTotalStorageBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindTotalStorageBuffersDynamic )
&& ( maxDescriptorSetUpdateAfterBindTotalBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindTotalBuffersDynamic );
#endif
}
bool operator!=( PhysicalDeviceMaintenance7PropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance7PropertiesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 robustFragmentShadingRateAttachmentAccess = {};
VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilAttachmentAccess = {};
uint32_t maxDescriptorSetTotalUniformBuffersDynamic = {};
uint32_t maxDescriptorSetTotalStorageBuffersDynamic = {};
uint32_t maxDescriptorSetTotalBuffersDynamic = {};
uint32_t maxDescriptorSetUpdateAfterBindTotalUniformBuffersDynamic = {};
uint32_t maxDescriptorSetUpdateAfterBindTotalStorageBuffersDynamic = {};
uint32_t maxDescriptorSetUpdateAfterBindTotalBuffersDynamic = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceMaintenance7PropertiesKHR>
{
using Type = PhysicalDeviceMaintenance7PropertiesKHR;
};
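// Usage sketch (editorial addition): the maintenance7 totals above bound the combined count of
// dynamic buffers in a pipeline layout across all stages, complementing the per-stage limits. A
// validation helper might look like this, with `uniformDynamicCount` / `storageDynamicCount`
// tallied from the layout and `maint7Props` queried through a properties chain as shown earlier:
//
//   bool layoutFits =
//     ( uniformDynamicCount <= maint7Props.maxDescriptorSetTotalUniformBuffersDynamic ) &&
//     ( storageDynamicCount <= maint7Props.maxDescriptorSetTotalStorageBuffersDynamic ) &&
//     ( uniformDynamicCount + storageDynamicCount <= maint7Props.maxDescriptorSetTotalBuffersDynamic );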
// wrapper struct for struct VkPhysicalDeviceMaintenance8FeaturesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMaintenance8FeaturesKHR.html
struct PhysicalDeviceMaintenance8FeaturesKHR
{
using NativeType = VkPhysicalDeviceMaintenance8FeaturesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance8FeaturesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance8FeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 maintenance8_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, maintenance8{ maintenance8_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance8FeaturesKHR( PhysicalDeviceMaintenance8FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceMaintenance8FeaturesKHR( VkPhysicalDeviceMaintenance8FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceMaintenance8FeaturesKHR( *reinterpret_cast<PhysicalDeviceMaintenance8FeaturesKHR const *>( &rhs ) )
{}
PhysicalDeviceMaintenance8FeaturesKHR & operator=( PhysicalDeviceMaintenance8FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceMaintenance8FeaturesKHR & operator=( VkPhysicalDeviceMaintenance8FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance8FeaturesKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance8FeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance8FeaturesKHR & setMaintenance8( VULKAN_HPP_NAMESPACE::Bool32 maintenance8_ ) VULKAN_HPP_NOEXCEPT
{
maintenance8 = maintenance8_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceMaintenance8FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceMaintenance8FeaturesKHR*>( this );
}
operator VkPhysicalDeviceMaintenance8FeaturesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceMaintenance8FeaturesKHR*>( this );
}
operator VkPhysicalDeviceMaintenance8FeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceMaintenance8FeaturesKHR*>( this );
}
operator VkPhysicalDeviceMaintenance8FeaturesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceMaintenance8FeaturesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, maintenance8 );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceMaintenance8FeaturesKHR const & ) const = default;
#else
bool operator==( PhysicalDeviceMaintenance8FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( maintenance8 == rhs.maintenance8 );
#endif
}
bool operator!=( PhysicalDeviceMaintenance8FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance8FeaturesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 maintenance8 = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceMaintenance8FeaturesKHR>
{
using Type = PhysicalDeviceMaintenance8FeaturesKHR;
};
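// Illustrative usage sketch (editorial example, not generator output): query
// maintenance8 support by chaining this struct into a features2 query;
// `physicalDevice` is assumed to be a valid VULKAN_HPP_NAMESPACE::PhysicalDevice.
//   VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance8FeaturesKHR maintenance8Features;
//   VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features2;
//   features2.pNext = &maintenance8Features;
//   physicalDevice.getFeatures2( &features2 );
//   bool supported = ( maintenance8Features.maintenance8 == VK_TRUE );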
// wrapper struct for struct VkPhysicalDeviceMaintenance9FeaturesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMaintenance9FeaturesKHR.html
struct PhysicalDeviceMaintenance9FeaturesKHR
{
using NativeType = VkPhysicalDeviceMaintenance9FeaturesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance9FeaturesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance9FeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 maintenance9_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, maintenance9{ maintenance9_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance9FeaturesKHR( PhysicalDeviceMaintenance9FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceMaintenance9FeaturesKHR( VkPhysicalDeviceMaintenance9FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceMaintenance9FeaturesKHR( *reinterpret_cast<PhysicalDeviceMaintenance9FeaturesKHR const *>( &rhs ) )
{}
PhysicalDeviceMaintenance9FeaturesKHR & operator=( PhysicalDeviceMaintenance9FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceMaintenance9FeaturesKHR & operator=( VkPhysicalDeviceMaintenance9FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance9FeaturesKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance9FeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance9FeaturesKHR & setMaintenance9( VULKAN_HPP_NAMESPACE::Bool32 maintenance9_ ) VULKAN_HPP_NOEXCEPT
{
maintenance9 = maintenance9_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceMaintenance9FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceMaintenance9FeaturesKHR*>( this );
}
operator VkPhysicalDeviceMaintenance9FeaturesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceMaintenance9FeaturesKHR*>( this );
}
operator VkPhysicalDeviceMaintenance9FeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceMaintenance9FeaturesKHR*>( this );
}
operator VkPhysicalDeviceMaintenance9FeaturesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceMaintenance9FeaturesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, maintenance9 );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceMaintenance9FeaturesKHR const & ) const = default;
#else
bool operator==( PhysicalDeviceMaintenance9FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( maintenance9 == rhs.maintenance9 );
#endif
}
bool operator!=( PhysicalDeviceMaintenance9FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance9FeaturesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 maintenance9 = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceMaintenance9FeaturesKHR>
{
using Type = PhysicalDeviceMaintenance9FeaturesKHR;
};
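// Illustrative sketch: the same kind of query via StructureChain, which wires
// the pNext links automatically; `physicalDevice` is an assumed valid handle.
//   auto chain = physicalDevice.getFeatures2<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2,
//                                            VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance9FeaturesKHR>();
//   VULKAN_HPP_NAMESPACE::Bool32 maintenance9 =
//     chain.get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance9FeaturesKHR>().maintenance9;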
// wrapper struct for struct VkPhysicalDeviceMaintenance9PropertiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMaintenance9PropertiesKHR.html
struct PhysicalDeviceMaintenance9PropertiesKHR
{
using NativeType = VkPhysicalDeviceMaintenance9PropertiesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance9PropertiesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance9PropertiesKHR(VULKAN_HPP_NAMESPACE::Bool32 image2DViewOf3DSparse_ = {}, VULKAN_HPP_NAMESPACE::DefaultVertexAttributeValueKHR defaultVertexAttributeValue_ = VULKAN_HPP_NAMESPACE::DefaultVertexAttributeValueKHR::eZeroZeroZeroZero, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, image2DViewOf3DSparse{ image2DViewOf3DSparse_ }, defaultVertexAttributeValue{ defaultVertexAttributeValue_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance9PropertiesKHR( PhysicalDeviceMaintenance9PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceMaintenance9PropertiesKHR( VkPhysicalDeviceMaintenance9PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceMaintenance9PropertiesKHR( *reinterpret_cast<PhysicalDeviceMaintenance9PropertiesKHR const *>( &rhs ) )
{}
PhysicalDeviceMaintenance9PropertiesKHR & operator=( PhysicalDeviceMaintenance9PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceMaintenance9PropertiesKHR & operator=( VkPhysicalDeviceMaintenance9PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance9PropertiesKHR const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceMaintenance9PropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceMaintenance9PropertiesKHR*>( this );
}
operator VkPhysicalDeviceMaintenance9PropertiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceMaintenance9PropertiesKHR*>( this );
}
operator VkPhysicalDeviceMaintenance9PropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceMaintenance9PropertiesKHR*>( this );
}
operator VkPhysicalDeviceMaintenance9PropertiesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceMaintenance9PropertiesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::DefaultVertexAttributeValueKHR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, image2DViewOf3DSparse, defaultVertexAttributeValue );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceMaintenance9PropertiesKHR const & ) const = default;
#else
bool operator==( PhysicalDeviceMaintenance9PropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( image2DViewOf3DSparse == rhs.image2DViewOf3DSparse )
&& ( defaultVertexAttributeValue == rhs.defaultVertexAttributeValue );
#endif
}
bool operator!=( PhysicalDeviceMaintenance9PropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance9PropertiesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 image2DViewOf3DSparse = {};
VULKAN_HPP_NAMESPACE::DefaultVertexAttributeValueKHR defaultVertexAttributeValue = VULKAN_HPP_NAMESPACE::DefaultVertexAttributeValueKHR::eZeroZeroZeroZero;
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceMaintenance9PropertiesKHR>
{
using Type = PhysicalDeviceMaintenance9PropertiesKHR;
};
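// Illustrative sketch: returned-only properties structs like this one have no
// setters and are filled through getProperties2; `physicalDevice` is an
// assumed valid handle.
//   VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance9PropertiesKHR maintenance9Props;
//   VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 props2;
//   props2.pNext = &maintenance9Props;
//   physicalDevice.getProperties2( &props2 );
//   // maintenance9Props.image2DViewOf3DSparse and .defaultVertexAttributeValue now hold device values.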
// wrapper struct for struct VkPhysicalDeviceMapMemoryPlacedFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMapMemoryPlacedFeaturesEXT.html
struct PhysicalDeviceMapMemoryPlacedFeaturesEXT
{
using NativeType = VkPhysicalDeviceMapMemoryPlacedFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMapMemoryPlacedFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceMapMemoryPlacedFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 memoryMapPlaced_ = {}, VULKAN_HPP_NAMESPACE::Bool32 memoryMapRangePlaced_ = {}, VULKAN_HPP_NAMESPACE::Bool32 memoryUnmapReserve_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, memoryMapPlaced{ memoryMapPlaced_ }, memoryMapRangePlaced{ memoryMapRangePlaced_ }, memoryUnmapReserve{ memoryUnmapReserve_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceMapMemoryPlacedFeaturesEXT( PhysicalDeviceMapMemoryPlacedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceMapMemoryPlacedFeaturesEXT( VkPhysicalDeviceMapMemoryPlacedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceMapMemoryPlacedFeaturesEXT( *reinterpret_cast<PhysicalDeviceMapMemoryPlacedFeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceMapMemoryPlacedFeaturesEXT & operator=( PhysicalDeviceMapMemoryPlacedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceMapMemoryPlacedFeaturesEXT & operator=( VkPhysicalDeviceMapMemoryPlacedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMapMemoryPlacedFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMapMemoryPlacedFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMapMemoryPlacedFeaturesEXT & setMemoryMapPlaced( VULKAN_HPP_NAMESPACE::Bool32 memoryMapPlaced_ ) VULKAN_HPP_NOEXCEPT
{
memoryMapPlaced = memoryMapPlaced_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMapMemoryPlacedFeaturesEXT & setMemoryMapRangePlaced( VULKAN_HPP_NAMESPACE::Bool32 memoryMapRangePlaced_ ) VULKAN_HPP_NOEXCEPT
{
memoryMapRangePlaced = memoryMapRangePlaced_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMapMemoryPlacedFeaturesEXT & setMemoryUnmapReserve( VULKAN_HPP_NAMESPACE::Bool32 memoryUnmapReserve_ ) VULKAN_HPP_NOEXCEPT
{
memoryUnmapReserve = memoryUnmapReserve_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceMapMemoryPlacedFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceMapMemoryPlacedFeaturesEXT*>( this );
}
operator VkPhysicalDeviceMapMemoryPlacedFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceMapMemoryPlacedFeaturesEXT*>( this );
}
operator VkPhysicalDeviceMapMemoryPlacedFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceMapMemoryPlacedFeaturesEXT*>( this );
}
operator VkPhysicalDeviceMapMemoryPlacedFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceMapMemoryPlacedFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, memoryMapPlaced, memoryMapRangePlaced, memoryUnmapReserve );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceMapMemoryPlacedFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceMapMemoryPlacedFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( memoryMapPlaced == rhs.memoryMapPlaced )
&& ( memoryMapRangePlaced == rhs.memoryMapRangePlaced )
&& ( memoryUnmapReserve == rhs.memoryUnmapReserve );
#endif
}
bool operator!=( PhysicalDeviceMapMemoryPlacedFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMapMemoryPlacedFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 memoryMapPlaced = {};
VULKAN_HPP_NAMESPACE::Bool32 memoryMapRangePlaced = {};
VULKAN_HPP_NAMESPACE::Bool32 memoryUnmapReserve = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceMapMemoryPlacedFeaturesEXT>
{
using Type = PhysicalDeviceMapMemoryPlacedFeaturesEXT;
};
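// Illustrative sketch: the fluent setters let a features struct be assembled
// inline and chained behind DeviceCreateInfo::pNext at device creation;
// `deviceCreateInfo` is an assumed DeviceCreateInfo under construction.
//   auto placedFeatures = VULKAN_HPP_NAMESPACE::PhysicalDeviceMapMemoryPlacedFeaturesEXT{}
//                           .setMemoryMapPlaced( VK_TRUE )
//                           .setMemoryMapRangePlaced( VK_TRUE );
//   deviceCreateInfo.setPNext( &placedFeatures );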
// wrapper struct for struct VkPhysicalDeviceMapMemoryPlacedPropertiesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMapMemoryPlacedPropertiesEXT.html
struct PhysicalDeviceMapMemoryPlacedPropertiesEXT
{
using NativeType = VkPhysicalDeviceMapMemoryPlacedPropertiesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMapMemoryPlacedPropertiesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceMapMemoryPlacedPropertiesEXT(VULKAN_HPP_NAMESPACE::DeviceSize minPlacedMemoryMapAlignment_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, minPlacedMemoryMapAlignment{ minPlacedMemoryMapAlignment_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceMapMemoryPlacedPropertiesEXT( PhysicalDeviceMapMemoryPlacedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceMapMemoryPlacedPropertiesEXT( VkPhysicalDeviceMapMemoryPlacedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceMapMemoryPlacedPropertiesEXT( *reinterpret_cast<PhysicalDeviceMapMemoryPlacedPropertiesEXT const *>( &rhs ) )
{}
PhysicalDeviceMapMemoryPlacedPropertiesEXT & operator=( PhysicalDeviceMapMemoryPlacedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceMapMemoryPlacedPropertiesEXT & operator=( VkPhysicalDeviceMapMemoryPlacedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMapMemoryPlacedPropertiesEXT const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceMapMemoryPlacedPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceMapMemoryPlacedPropertiesEXT*>( this );
}
operator VkPhysicalDeviceMapMemoryPlacedPropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceMapMemoryPlacedPropertiesEXT*>( this );
}
operator VkPhysicalDeviceMapMemoryPlacedPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceMapMemoryPlacedPropertiesEXT*>( this );
}
operator VkPhysicalDeviceMapMemoryPlacedPropertiesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceMapMemoryPlacedPropertiesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, minPlacedMemoryMapAlignment );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceMapMemoryPlacedPropertiesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceMapMemoryPlacedPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( minPlacedMemoryMapAlignment == rhs.minPlacedMemoryMapAlignment );
#endif
}
bool operator!=( PhysicalDeviceMapMemoryPlacedPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMapMemoryPlacedPropertiesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::DeviceSize minPlacedMemoryMapAlignment = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceMapMemoryPlacedPropertiesEXT>
{
using Type = PhysicalDeviceMapMemoryPlacedPropertiesEXT;
};
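// Illustrative sketch: after this struct is filled via getProperties2, a
// placed-map target address must be a multiple of minPlacedMemoryMapAlignment;
// `placedProps` and `candidateAddress` are assumed names.
//   bool aligned =
//     ( reinterpret_cast<uintptr_t>( candidateAddress ) % placedProps.minPlacedMemoryMapAlignment ) == 0;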
// wrapper struct for struct VkPhysicalDeviceMemoryBudgetPropertiesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMemoryBudgetPropertiesEXT.html
struct PhysicalDeviceMemoryBudgetPropertiesEXT
{
using NativeType = VkPhysicalDeviceMemoryBudgetPropertiesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryBudgetPropertiesEXT(std::array<VULKAN_HPP_NAMESPACE::DeviceSize,VK_MAX_MEMORY_HEAPS> const & heapBudget_ = {}, std::array<VULKAN_HPP_NAMESPACE::DeviceSize,VK_MAX_MEMORY_HEAPS> const & heapUsage_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, heapBudget{ heapBudget_ }, heapUsage{ heapUsage_ }
{}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryBudgetPropertiesEXT( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceMemoryBudgetPropertiesEXT( VkPhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceMemoryBudgetPropertiesEXT( *reinterpret_cast<PhysicalDeviceMemoryBudgetPropertiesEXT const *>( &rhs ) )
{}
PhysicalDeviceMemoryBudgetPropertiesEXT & operator=( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceMemoryBudgetPropertiesEXT & operator=( VkPhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceMemoryBudgetPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceMemoryBudgetPropertiesEXT*>( this );
}
operator VkPhysicalDeviceMemoryBudgetPropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceMemoryBudgetPropertiesEXT*>( this );
}
operator VkPhysicalDeviceMemoryBudgetPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceMemoryBudgetPropertiesEXT*>( this );
}
operator VkPhysicalDeviceMemoryBudgetPropertiesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceMemoryBudgetPropertiesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::DeviceSize, VK_MAX_MEMORY_HEAPS> const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::DeviceSize, VK_MAX_MEMORY_HEAPS> const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, heapBudget, heapUsage );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceMemoryBudgetPropertiesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( heapBudget == rhs.heapBudget )
&& ( heapUsage == rhs.heapUsage );
#endif
}
bool operator!=( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::DeviceSize, VK_MAX_MEMORY_HEAPS> heapBudget = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::DeviceSize, VK_MAX_MEMORY_HEAPS> heapUsage = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT>
{
using Type = PhysicalDeviceMemoryBudgetPropertiesEXT;
};
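// Illustrative sketch: chain this struct into a memory-properties query and
// derive the remaining per-heap budget; `physicalDevice` is an assumed handle.
//   VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT budget;
//   VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memProps2;
//   memProps2.pNext = &budget;
//   physicalDevice.getMemoryProperties2( &memProps2 );
//   for ( uint32_t i = 0; i < memProps2.memoryProperties.memoryHeapCount; ++i )
//   {
//     VULKAN_HPP_NAMESPACE::DeviceSize remaining = budget.heapBudget[i] - budget.heapUsage[i];
//     // keep new allocations on heap i below `remaining` to stay within budget
//   }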
// wrapper struct for struct VkPhysicalDeviceMemoryDecompressionFeaturesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMemoryDecompressionFeaturesNV.html
struct PhysicalDeviceMemoryDecompressionFeaturesNV
{
using NativeType = VkPhysicalDeviceMemoryDecompressionFeaturesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryDecompressionFeaturesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryDecompressionFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 memoryDecompression_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, memoryDecompression{ memoryDecompression_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryDecompressionFeaturesNV( PhysicalDeviceMemoryDecompressionFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceMemoryDecompressionFeaturesNV( VkPhysicalDeviceMemoryDecompressionFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceMemoryDecompressionFeaturesNV( *reinterpret_cast<PhysicalDeviceMemoryDecompressionFeaturesNV const *>( &rhs ) )
{}
PhysicalDeviceMemoryDecompressionFeaturesNV & operator=( PhysicalDeviceMemoryDecompressionFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceMemoryDecompressionFeaturesNV & operator=( VkPhysicalDeviceMemoryDecompressionFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryDecompressionFeaturesNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryDecompressionFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryDecompressionFeaturesNV & setMemoryDecompression( VULKAN_HPP_NAMESPACE::Bool32 memoryDecompression_ ) VULKAN_HPP_NOEXCEPT
{
memoryDecompression = memoryDecompression_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceMemoryDecompressionFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceMemoryDecompressionFeaturesNV*>( this );
}
operator VkPhysicalDeviceMemoryDecompressionFeaturesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceMemoryDecompressionFeaturesNV*>( this );
}
operator VkPhysicalDeviceMemoryDecompressionFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceMemoryDecompressionFeaturesNV*>( this );
}
operator VkPhysicalDeviceMemoryDecompressionFeaturesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceMemoryDecompressionFeaturesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, memoryDecompression );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceMemoryDecompressionFeaturesNV const & ) const = default;
#else
bool operator==( PhysicalDeviceMemoryDecompressionFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( memoryDecompression == rhs.memoryDecompression );
#endif
}
bool operator!=( PhysicalDeviceMemoryDecompressionFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryDecompressionFeaturesNV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 memoryDecompression = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceMemoryDecompressionFeaturesNV>
{
using Type = PhysicalDeviceMemoryDecompressionFeaturesNV;
};
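// Illustrative sketch: the conversion operators above let a wrapper instance
// be passed directly where the C API expects the native Vk struct.
//   VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryDecompressionFeaturesNV decompressionFeatures;
//   VkPhysicalDeviceMemoryDecompressionFeaturesNV * native = decompressionFeatures;  // uses operator VkPhysicalDeviceMemoryDecompressionFeaturesNV *()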
// wrapper struct for struct VkPhysicalDeviceMemoryDecompressionPropertiesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMemoryDecompressionPropertiesNV.html
struct PhysicalDeviceMemoryDecompressionPropertiesNV
{
using NativeType = VkPhysicalDeviceMemoryDecompressionPropertiesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryDecompressionPropertiesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryDecompressionPropertiesNV(VULKAN_HPP_NAMESPACE::MemoryDecompressionMethodFlagsNV decompressionMethods_ = {}, uint64_t maxDecompressionIndirectCount_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, decompressionMethods{ decompressionMethods_ }, maxDecompressionIndirectCount{ maxDecompressionIndirectCount_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryDecompressionPropertiesNV( PhysicalDeviceMemoryDecompressionPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceMemoryDecompressionPropertiesNV( VkPhysicalDeviceMemoryDecompressionPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceMemoryDecompressionPropertiesNV( *reinterpret_cast<PhysicalDeviceMemoryDecompressionPropertiesNV const *>( &rhs ) )
{}
PhysicalDeviceMemoryDecompressionPropertiesNV & operator=( PhysicalDeviceMemoryDecompressionPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceMemoryDecompressionPropertiesNV & operator=( VkPhysicalDeviceMemoryDecompressionPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryDecompressionPropertiesNV const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceMemoryDecompressionPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceMemoryDecompressionPropertiesNV*>( this );
}
operator VkPhysicalDeviceMemoryDecompressionPropertiesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceMemoryDecompressionPropertiesNV*>( this );
}
operator VkPhysicalDeviceMemoryDecompressionPropertiesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceMemoryDecompressionPropertiesNV*>( this );
}
operator VkPhysicalDeviceMemoryDecompressionPropertiesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceMemoryDecompressionPropertiesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::MemoryDecompressionMethodFlagsNV const &, uint64_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, decompressionMethods, maxDecompressionIndirectCount );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceMemoryDecompressionPropertiesNV const & ) const = default;
#else
bool operator==( PhysicalDeviceMemoryDecompressionPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( decompressionMethods == rhs.decompressionMethods )
&& ( maxDecompressionIndirectCount == rhs.maxDecompressionIndirectCount );
#endif
}
bool operator!=( PhysicalDeviceMemoryDecompressionPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryDecompressionPropertiesNV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::MemoryDecompressionMethodFlagsNV decompressionMethods = {};
uint64_t maxDecompressionIndirectCount = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceMemoryDecompressionPropertiesNV>
{
using Type = PhysicalDeviceMemoryDecompressionPropertiesNV;
};
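// Illustrative sketch: testing a method bit in decompressionMethods after a
// properties query; `decompressionProps` is an assumed, already-filled
// instance, and eGdeflate10 is assumed as the GDeflate 1.0 flag-bit name.
//   bool supportsGDeflate10 = static_cast<bool>(
//     decompressionProps.decompressionMethods & VULKAN_HPP_NAMESPACE::MemoryDecompressionMethodFlagBitsNV::eGdeflate10 );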
// wrapper struct for struct VkPhysicalDeviceMemoryPriorityFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMemoryPriorityFeaturesEXT.html
struct PhysicalDeviceMemoryPriorityFeaturesEXT
{
using NativeType = VkPhysicalDeviceMemoryPriorityFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryPriorityFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 memoryPriority_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, memoryPriority{ memoryPriority_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryPriorityFeaturesEXT( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceMemoryPriorityFeaturesEXT( VkPhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceMemoryPriorityFeaturesEXT( *reinterpret_cast<PhysicalDeviceMemoryPriorityFeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceMemoryPriorityFeaturesEXT & operator=( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceMemoryPriorityFeaturesEXT & operator=( VkPhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryPriorityFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryPriorityFeaturesEXT & setMemoryPriority( VULKAN_HPP_NAMESPACE::Bool32 memoryPriority_ ) VULKAN_HPP_NOEXCEPT
{
memoryPriority = memoryPriority_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceMemoryPriorityFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceMemoryPriorityFeaturesEXT*>( this );
}
operator VkPhysicalDeviceMemoryPriorityFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceMemoryPriorityFeaturesEXT*>( this );
}
operator VkPhysicalDeviceMemoryPriorityFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceMemoryPriorityFeaturesEXT*>( this );
}
operator VkPhysicalDeviceMemoryPriorityFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceMemoryPriorityFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, memoryPriority );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceMemoryPriorityFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( memoryPriority == rhs.memoryPriority );
#endif
}
bool operator!=( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 memoryPriority = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT>
{
using Type = PhysicalDeviceMemoryPriorityFeaturesEXT;
};
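// Illustrative sketch: enabling the feature at device creation by chaining it
// behind DeviceCreateInfo::pNext; `deviceCreateInfo` is an assumed name.
//   VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT priorityFeatures{ VK_TRUE };
//   deviceCreateInfo.setPNext( &priorityFeatures );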
// wrapper struct for struct VkPhysicalDeviceMemoryProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMemoryProperties.html
struct PhysicalDeviceMemoryProperties
{
using NativeType = VkPhysicalDeviceMemoryProperties;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties(uint32_t memoryTypeCount_ = {}, std::array<VULKAN_HPP_NAMESPACE::MemoryType,VK_MAX_MEMORY_TYPES> const & memoryTypes_ = {}, uint32_t memoryHeapCount_ = {}, std::array<VULKAN_HPP_NAMESPACE::MemoryHeap,VK_MAX_MEMORY_HEAPS> const & memoryHeaps_ = {}) VULKAN_HPP_NOEXCEPT
: memoryTypeCount{ memoryTypeCount_ }, memoryTypes{ memoryTypes_ }, memoryHeapCount{ memoryHeapCount_ }, memoryHeaps{ memoryHeaps_ }
{}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties( PhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceMemoryProperties( VkPhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceMemoryProperties( *reinterpret_cast<PhysicalDeviceMemoryProperties const *>( &rhs ) )
{}
PhysicalDeviceMemoryProperties & operator=( PhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceMemoryProperties & operator=( VkPhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceMemoryProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceMemoryProperties*>( this );
}
operator VkPhysicalDeviceMemoryProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceMemoryProperties*>( this );
}
operator VkPhysicalDeviceMemoryProperties const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceMemoryProperties*>( this );
}
operator VkPhysicalDeviceMemoryProperties *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceMemoryProperties*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<uint32_t const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::MemoryType, VK_MAX_MEMORY_TYPES> const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::MemoryHeap, VK_MAX_MEMORY_HEAPS> const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( memoryTypeCount, memoryTypes, memoryHeapCount, memoryHeaps );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceMemoryProperties const & ) const = default;
#else
bool operator==( PhysicalDeviceMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
// element-wise comparison of the ArrayWrapper1D members (via their std::array base)
// covers the full fixed-size arrays without reading struct padding bytes
return ( memoryTypeCount == rhs.memoryTypeCount )
&& ( memoryTypes == rhs.memoryTypes )
&& ( memoryHeapCount == rhs.memoryHeapCount )
&& ( memoryHeaps == rhs.memoryHeaps );
#endif
}
bool operator!=( PhysicalDeviceMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
uint32_t memoryTypeCount = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::MemoryType, VK_MAX_MEMORY_TYPES> memoryTypes = {};
uint32_t memoryHeapCount = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::MemoryHeap, VK_MAX_MEMORY_HEAPS> memoryHeaps = {};
};
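// Illustrative sketch (a hypothetical helper, not part of this header): the
// classic memory-type search over the memoryTypeCount active entries.
//   uint32_t findMemoryType( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties const & memProps,
//                            uint32_t typeBits, VULKAN_HPP_NAMESPACE::MemoryPropertyFlags required )
//   {
//     for ( uint32_t i = 0; i < memProps.memoryTypeCount; ++i )
//       if ( ( typeBits & ( 1u << i ) ) && ( ( memProps.memoryTypes[i].propertyFlags & required ) == required ) )
//         return i;
//     return UINT32_MAX;  // no suitable memory type
//   }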
// wrapper struct for struct VkPhysicalDeviceMemoryProperties2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMemoryProperties2.html
struct PhysicalDeviceMemoryProperties2
{
using NativeType = VkPhysicalDeviceMemoryProperties2;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryProperties2;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties2(VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, memoryProperties{ memoryProperties_ }
{}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties2( PhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceMemoryProperties2( VkPhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceMemoryProperties2( *reinterpret_cast<PhysicalDeviceMemoryProperties2 const *>( &rhs ) )
{}
PhysicalDeviceMemoryProperties2 & operator=( PhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceMemoryProperties2 & operator=( VkPhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceMemoryProperties2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceMemoryProperties2*>( this );
}
operator VkPhysicalDeviceMemoryProperties2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceMemoryProperties2*>( this );
}
operator VkPhysicalDeviceMemoryProperties2 const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceMemoryProperties2*>( this );
}
operator VkPhysicalDeviceMemoryProperties2 *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceMemoryProperties2*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, memoryProperties );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceMemoryProperties2 const & ) const = default;
#else
bool operator==( PhysicalDeviceMemoryProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( memoryProperties == rhs.memoryProperties );
#endif
}
bool operator!=( PhysicalDeviceMemoryProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryProperties2;
void * pNext = {};
VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceMemoryProperties2>
{
using Type = PhysicalDeviceMemoryProperties2;
};
using PhysicalDeviceMemoryProperties2KHR = PhysicalDeviceMemoryProperties2;
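// Illustrative sketch: in the enhanced API mode this struct (or its KHR alias)
// is returned by value; `physicalDevice` is an assumed valid handle.
//   VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memProps2 = physicalDevice.getMemoryProperties2();
//   uint32_t heapCount = memProps2.memoryProperties.memoryHeapCount;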
// wrapper struct for struct VkPhysicalDeviceMeshShaderFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMeshShaderFeaturesEXT.html
struct PhysicalDeviceMeshShaderFeaturesEXT
{
using NativeType = VkPhysicalDeviceMeshShaderFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMeshShaderFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 taskShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 meshShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiviewMeshShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRateMeshShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 meshShaderQueries_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, taskShader{ taskShader_ }, meshShader{ meshShader_ }, multiviewMeshShader{ multiviewMeshShader_ }, primitiveFragmentShadingRateMeshShader{ primitiveFragmentShadingRateMeshShader_ }, meshShaderQueries{ meshShaderQueries_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesEXT( PhysicalDeviceMeshShaderFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceMeshShaderFeaturesEXT( VkPhysicalDeviceMeshShaderFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceMeshShaderFeaturesEXT( *reinterpret_cast<PhysicalDeviceMeshShaderFeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceMeshShaderFeaturesEXT & operator=( PhysicalDeviceMeshShaderFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceMeshShaderFeaturesEXT & operator=( VkPhysicalDeviceMeshShaderFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesEXT & setTaskShader( VULKAN_HPP_NAMESPACE::Bool32 taskShader_ ) VULKAN_HPP_NOEXCEPT
{
taskShader = taskShader_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesEXT & setMeshShader( VULKAN_HPP_NAMESPACE::Bool32 meshShader_ ) VULKAN_HPP_NOEXCEPT
{
meshShader = meshShader_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesEXT & setMultiviewMeshShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewMeshShader_ ) VULKAN_HPP_NOEXCEPT
{
multiviewMeshShader = multiviewMeshShader_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesEXT & setPrimitiveFragmentShadingRateMeshShader( VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRateMeshShader_ ) VULKAN_HPP_NOEXCEPT
{
primitiveFragmentShadingRateMeshShader = primitiveFragmentShadingRateMeshShader_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesEXT & setMeshShaderQueries( VULKAN_HPP_NAMESPACE::Bool32 meshShaderQueries_ ) VULKAN_HPP_NOEXCEPT
{
meshShaderQueries = meshShaderQueries_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceMeshShaderFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceMeshShaderFeaturesEXT*>( this );
}
operator VkPhysicalDeviceMeshShaderFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceMeshShaderFeaturesEXT*>( this );
}
operator VkPhysicalDeviceMeshShaderFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceMeshShaderFeaturesEXT*>( this );
}
operator VkPhysicalDeviceMeshShaderFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceMeshShaderFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, taskShader, meshShader, multiviewMeshShader, primitiveFragmentShadingRateMeshShader, meshShaderQueries );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceMeshShaderFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceMeshShaderFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( taskShader == rhs.taskShader )
&& ( meshShader == rhs.meshShader )
&& ( multiviewMeshShader == rhs.multiviewMeshShader )
&& ( primitiveFragmentShadingRateMeshShader == rhs.primitiveFragmentShadingRateMeshShader )
&& ( meshShaderQueries == rhs.meshShaderQueries );
#endif
}
bool operator!=( PhysicalDeviceMeshShaderFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMeshShaderFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 taskShader = {};
VULKAN_HPP_NAMESPACE::Bool32 meshShader = {};
VULKAN_HPP_NAMESPACE::Bool32 multiviewMeshShader = {};
VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRateMeshShader = {};
VULKAN_HPP_NAMESPACE::Bool32 meshShaderQueries = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceMeshShaderFeaturesEXT>
{
using Type = PhysicalDeviceMeshShaderFeaturesEXT;
};
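// Illustrative sketch: requesting task and mesh shaders (leaving the remaining
// members disabled) via the fluent setters.
//   auto meshFeatures = VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesEXT{}
//                         .setTaskShader( VK_TRUE )
//                         .setMeshShader( VK_TRUE );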
// wrapper struct for struct VkPhysicalDeviceMeshShaderFeaturesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMeshShaderFeaturesNV.html
struct PhysicalDeviceMeshShaderFeaturesNV
{
using NativeType = VkPhysicalDeviceMeshShaderFeaturesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMeshShaderFeaturesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 taskShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 meshShader_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, taskShader{ taskShader_ }, meshShader{ meshShader_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesNV( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceMeshShaderFeaturesNV( VkPhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceMeshShaderFeaturesNV( *reinterpret_cast<PhysicalDeviceMeshShaderFeaturesNV const *>( &rhs ) )
{}
PhysicalDeviceMeshShaderFeaturesNV & operator=( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceMeshShaderFeaturesNV & operator=( VkPhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesNV & setTaskShader( VULKAN_HPP_NAMESPACE::Bool32 taskShader_ ) VULKAN_HPP_NOEXCEPT
{
taskShader = taskShader_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesNV & setMeshShader( VULKAN_HPP_NAMESPACE::Bool32 meshShader_ ) VULKAN_HPP_NOEXCEPT
{
meshShader = meshShader_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceMeshShaderFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceMeshShaderFeaturesNV*>( this );
}
operator VkPhysicalDeviceMeshShaderFeaturesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceMeshShaderFeaturesNV*>( this );
}
operator VkPhysicalDeviceMeshShaderFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceMeshShaderFeaturesNV*>( this );
}
operator VkPhysicalDeviceMeshShaderFeaturesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceMeshShaderFeaturesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, taskShader, meshShader );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceMeshShaderFeaturesNV const & ) const = default;
#else
bool operator==( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( taskShader == rhs.taskShader )
&& ( meshShader == rhs.meshShader );
#endif
}
bool operator!=( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMeshShaderFeaturesNV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 taskShader = {};
VULKAN_HPP_NAMESPACE::Bool32 meshShader = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceMeshShaderFeaturesNV>
{
using Type = PhysicalDeviceMeshShaderFeaturesNV;
};
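// Illustrative sketch: the CppType specialization above maps the sType value
// back to the wrapper type at compile time (assumed use: StructureChain checks).
//   static_assert( std::is_same<VULKAN_HPP_NAMESPACE::CppType<VULKAN_HPP_NAMESPACE::StructureType,
//                    VULKAN_HPP_NAMESPACE::StructureType::ePhysicalDeviceMeshShaderFeaturesNV>::Type,
//                    VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV>::value, "" );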
// wrapper struct for struct VkPhysicalDeviceMeshShaderPropertiesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMeshShaderPropertiesEXT.html
struct PhysicalDeviceMeshShaderPropertiesEXT
{
using NativeType = VkPhysicalDeviceMeshShaderPropertiesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMeshShaderPropertiesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesEXT(uint32_t maxTaskWorkGroupTotalCount_ = {}, std::array<uint32_t,3> const & maxTaskWorkGroupCount_ = {}, uint32_t maxTaskWorkGroupInvocations_ = {}, std::array<uint32_t,3> const & maxTaskWorkGroupSize_ = {}, uint32_t maxTaskPayloadSize_ = {}, uint32_t maxTaskSharedMemorySize_ = {}, uint32_t maxTaskPayloadAndSharedMemorySize_ = {}, uint32_t maxMeshWorkGroupTotalCount_ = {}, std::array<uint32_t,3> const & maxMeshWorkGroupCount_ = {}, uint32_t maxMeshWorkGroupInvocations_ = {}, std::array<uint32_t,3> const & maxMeshWorkGroupSize_ = {}, uint32_t maxMeshSharedMemorySize_ = {}, uint32_t maxMeshPayloadAndSharedMemorySize_ = {}, uint32_t maxMeshOutputMemorySize_ = {}, uint32_t maxMeshPayloadAndOutputMemorySize_ = {}, uint32_t maxMeshOutputComponents_ = {}, uint32_t maxMeshOutputVertices_ = {}, uint32_t maxMeshOutputPrimitives_ = {}, uint32_t maxMeshOutputLayers_ = {}, uint32_t maxMeshMultiviewViewCount_ = {}, uint32_t meshOutputPerVertexGranularity_ = {}, uint32_t meshOutputPerPrimitiveGranularity_ = {}, uint32_t maxPreferredTaskWorkGroupInvocations_ = {}, uint32_t maxPreferredMeshWorkGroupInvocations_ = {}, VULKAN_HPP_NAMESPACE::Bool32 prefersLocalInvocationVertexOutput_ = {}, VULKAN_HPP_NAMESPACE::Bool32 prefersLocalInvocationPrimitiveOutput_ = {}, VULKAN_HPP_NAMESPACE::Bool32 prefersCompactVertexOutput_ = {}, VULKAN_HPP_NAMESPACE::Bool32 prefersCompactPrimitiveOutput_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, maxTaskWorkGroupTotalCount{ maxTaskWorkGroupTotalCount_ }, maxTaskWorkGroupCount{ maxTaskWorkGroupCount_ }, maxTaskWorkGroupInvocations{ maxTaskWorkGroupInvocations_ }, maxTaskWorkGroupSize{ maxTaskWorkGroupSize_ }, maxTaskPayloadSize{ maxTaskPayloadSize_ }, maxTaskSharedMemorySize{ maxTaskSharedMemorySize_ }, maxTaskPayloadAndSharedMemorySize{ maxTaskPayloadAndSharedMemorySize_ }, maxMeshWorkGroupTotalCount{ maxMeshWorkGroupTotalCount_ }, maxMeshWorkGroupCount{ maxMeshWorkGroupCount_ }, maxMeshWorkGroupInvocations{ maxMeshWorkGroupInvocations_ }, maxMeshWorkGroupSize{ maxMeshWorkGroupSize_ }, maxMeshSharedMemorySize{ maxMeshSharedMemorySize_ }, maxMeshPayloadAndSharedMemorySize{ maxMeshPayloadAndSharedMemorySize_ }, maxMeshOutputMemorySize{ maxMeshOutputMemorySize_ }, maxMeshPayloadAndOutputMemorySize{ maxMeshPayloadAndOutputMemorySize_ }, maxMeshOutputComponents{ maxMeshOutputComponents_ }, maxMeshOutputVertices{ maxMeshOutputVertices_ }, maxMeshOutputPrimitives{ maxMeshOutputPrimitives_ }, maxMeshOutputLayers{ maxMeshOutputLayers_ }, maxMeshMultiviewViewCount{ maxMeshMultiviewViewCount_ }, meshOutputPerVertexGranularity{ meshOutputPerVertexGranularity_ }, meshOutputPerPrimitiveGranularity{ meshOutputPerPrimitiveGranularity_ }, maxPreferredTaskWorkGroupInvocations{ maxPreferredTaskWorkGroupInvocations_ }, maxPreferredMeshWorkGroupInvocations{ maxPreferredMeshWorkGroupInvocations_ }, prefersLocalInvocationVertexOutput{ prefersLocalInvocationVertexOutput_ }, prefersLocalInvocationPrimitiveOutput{ prefersLocalInvocationPrimitiveOutput_ }, prefersCompactVertexOutput{ prefersCompactVertexOutput_ }, prefersCompactPrimitiveOutput{ prefersCompactPrimitiveOutput_ }
{}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesEXT( PhysicalDeviceMeshShaderPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceMeshShaderPropertiesEXT( VkPhysicalDeviceMeshShaderPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceMeshShaderPropertiesEXT( *reinterpret_cast<PhysicalDeviceMeshShaderPropertiesEXT const *>( &rhs ) )
{}
PhysicalDeviceMeshShaderPropertiesEXT & operator=( PhysicalDeviceMeshShaderPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceMeshShaderPropertiesEXT & operator=( VkPhysicalDeviceMeshShaderPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesEXT const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceMeshShaderPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceMeshShaderPropertiesEXT*>( this );
}
operator VkPhysicalDeviceMeshShaderPropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceMeshShaderPropertiesEXT*>( this );
}
operator VkPhysicalDeviceMeshShaderPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceMeshShaderPropertiesEXT*>( this );
}
operator VkPhysicalDeviceMeshShaderPropertiesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceMeshShaderPropertiesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, maxTaskWorkGroupTotalCount, maxTaskWorkGroupCount, maxTaskWorkGroupInvocations, maxTaskWorkGroupSize, maxTaskPayloadSize, maxTaskSharedMemorySize, maxTaskPayloadAndSharedMemorySize, maxMeshWorkGroupTotalCount, maxMeshWorkGroupCount, maxMeshWorkGroupInvocations, maxMeshWorkGroupSize, maxMeshSharedMemorySize, maxMeshPayloadAndSharedMemorySize, maxMeshOutputMemorySize, maxMeshPayloadAndOutputMemorySize, maxMeshOutputComponents, maxMeshOutputVertices, maxMeshOutputPrimitives, maxMeshOutputLayers, maxMeshMultiviewViewCount, meshOutputPerVertexGranularity, meshOutputPerPrimitiveGranularity, maxPreferredTaskWorkGroupInvocations, maxPreferredMeshWorkGroupInvocations, prefersLocalInvocationVertexOutput, prefersLocalInvocationPrimitiveOutput, prefersCompactVertexOutput, prefersCompactPrimitiveOutput );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceMeshShaderPropertiesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceMeshShaderPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( maxTaskWorkGroupTotalCount == rhs.maxTaskWorkGroupTotalCount )
&& ( maxTaskWorkGroupCount == rhs.maxTaskWorkGroupCount )
&& ( maxTaskWorkGroupInvocations == rhs.maxTaskWorkGroupInvocations )
&& ( maxTaskWorkGroupSize == rhs.maxTaskWorkGroupSize )
&& ( maxTaskPayloadSize == rhs.maxTaskPayloadSize )
&& ( maxTaskSharedMemorySize == rhs.maxTaskSharedMemorySize )
&& ( maxTaskPayloadAndSharedMemorySize == rhs.maxTaskPayloadAndSharedMemorySize )
&& ( maxMeshWorkGroupTotalCount == rhs.maxMeshWorkGroupTotalCount )
&& ( maxMeshWorkGroupCount == rhs.maxMeshWorkGroupCount )
&& ( maxMeshWorkGroupInvocations == rhs.maxMeshWorkGroupInvocations )
&& ( maxMeshWorkGroupSize == rhs.maxMeshWorkGroupSize )
&& ( maxMeshSharedMemorySize == rhs.maxMeshSharedMemorySize )
&& ( maxMeshPayloadAndSharedMemorySize == rhs.maxMeshPayloadAndSharedMemorySize )
&& ( maxMeshOutputMemorySize == rhs.maxMeshOutputMemorySize )
&& ( maxMeshPayloadAndOutputMemorySize == rhs.maxMeshPayloadAndOutputMemorySize )
&& ( maxMeshOutputComponents == rhs.maxMeshOutputComponents )
&& ( maxMeshOutputVertices == rhs.maxMeshOutputVertices )
&& ( maxMeshOutputPrimitives == rhs.maxMeshOutputPrimitives )
&& ( maxMeshOutputLayers == rhs.maxMeshOutputLayers )
&& ( maxMeshMultiviewViewCount == rhs.maxMeshMultiviewViewCount )
&& ( meshOutputPerVertexGranularity == rhs.meshOutputPerVertexGranularity )
&& ( meshOutputPerPrimitiveGranularity == rhs.meshOutputPerPrimitiveGranularity )
&& ( maxPreferredTaskWorkGroupInvocations == rhs.maxPreferredTaskWorkGroupInvocations )
&& ( maxPreferredMeshWorkGroupInvocations == rhs.maxPreferredMeshWorkGroupInvocations )
&& ( prefersLocalInvocationVertexOutput == rhs.prefersLocalInvocationVertexOutput )
&& ( prefersLocalInvocationPrimitiveOutput == rhs.prefersLocalInvocationPrimitiveOutput )
&& ( prefersCompactVertexOutput == rhs.prefersCompactVertexOutput )
&& ( prefersCompactPrimitiveOutput == rhs.prefersCompactPrimitiveOutput );
#endif
}
bool operator!=( PhysicalDeviceMeshShaderPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMeshShaderPropertiesEXT;
void * pNext = {};
uint32_t maxTaskWorkGroupTotalCount = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> maxTaskWorkGroupCount = {};
uint32_t maxTaskWorkGroupInvocations = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> maxTaskWorkGroupSize = {};
uint32_t maxTaskPayloadSize = {};
uint32_t maxTaskSharedMemorySize = {};
uint32_t maxTaskPayloadAndSharedMemorySize = {};
uint32_t maxMeshWorkGroupTotalCount = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> maxMeshWorkGroupCount = {};
uint32_t maxMeshWorkGroupInvocations = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> maxMeshWorkGroupSize = {};
uint32_t maxMeshSharedMemorySize = {};
uint32_t maxMeshPayloadAndSharedMemorySize = {};
uint32_t maxMeshOutputMemorySize = {};
uint32_t maxMeshPayloadAndOutputMemorySize = {};
uint32_t maxMeshOutputComponents = {};
uint32_t maxMeshOutputVertices = {};
uint32_t maxMeshOutputPrimitives = {};
uint32_t maxMeshOutputLayers = {};
uint32_t maxMeshMultiviewViewCount = {};
uint32_t meshOutputPerVertexGranularity = {};
uint32_t meshOutputPerPrimitiveGranularity = {};
uint32_t maxPreferredTaskWorkGroupInvocations = {};
uint32_t maxPreferredMeshWorkGroupInvocations = {};
VULKAN_HPP_NAMESPACE::Bool32 prefersLocalInvocationVertexOutput = {};
VULKAN_HPP_NAMESPACE::Bool32 prefersLocalInvocationPrimitiveOutput = {};
VULKAN_HPP_NAMESPACE::Bool32 prefersCompactVertexOutput = {};
VULKAN_HPP_NAMESPACE::Bool32 prefersCompactPrimitiveOutput = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceMeshShaderPropertiesEXT>
{
using Type = PhysicalDeviceMeshShaderPropertiesEXT;
};
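// Illustrative usage sketch (editorial addition, not produced by the registry generator):
// mesh-shader limits are query-only, so this struct is filled by chaining it into the
// pNext list of a PhysicalDeviceProperties2 query. `physicalDevice` is an assumed,
// pre-existing VULKAN_HPP_NAMESPACE::PhysicalDevice.
//
//   VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesEXT meshProperties;
//   VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties2;
//   properties2.pNext = &meshProperties;
//   physicalDevice.getProperties2( &properties2 );
//   // meshProperties.maxMeshWorkGroupTotalCount etc. now hold the device limits.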
// wrapper struct for struct VkPhysicalDeviceMeshShaderPropertiesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMeshShaderPropertiesNV.html
struct PhysicalDeviceMeshShaderPropertiesNV
{
using NativeType = VkPhysicalDeviceMeshShaderPropertiesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMeshShaderPropertiesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesNV(uint32_t maxDrawMeshTasksCount_ = {}, uint32_t maxTaskWorkGroupInvocations_ = {}, std::array<uint32_t,3> const & maxTaskWorkGroupSize_ = {}, uint32_t maxTaskTotalMemorySize_ = {}, uint32_t maxTaskOutputCount_ = {}, uint32_t maxMeshWorkGroupInvocations_ = {}, std::array<uint32_t,3> const & maxMeshWorkGroupSize_ = {}, uint32_t maxMeshTotalMemorySize_ = {}, uint32_t maxMeshOutputVertices_ = {}, uint32_t maxMeshOutputPrimitives_ = {}, uint32_t maxMeshMultiviewViewCount_ = {}, uint32_t meshOutputPerVertexGranularity_ = {}, uint32_t meshOutputPerPrimitiveGranularity_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, maxDrawMeshTasksCount{ maxDrawMeshTasksCount_ }, maxTaskWorkGroupInvocations{ maxTaskWorkGroupInvocations_ }, maxTaskWorkGroupSize{ maxTaskWorkGroupSize_ }, maxTaskTotalMemorySize{ maxTaskTotalMemorySize_ }, maxTaskOutputCount{ maxTaskOutputCount_ }, maxMeshWorkGroupInvocations{ maxMeshWorkGroupInvocations_ }, maxMeshWorkGroupSize{ maxMeshWorkGroupSize_ }, maxMeshTotalMemorySize{ maxMeshTotalMemorySize_ }, maxMeshOutputVertices{ maxMeshOutputVertices_ }, maxMeshOutputPrimitives{ maxMeshOutputPrimitives_ }, maxMeshMultiviewViewCount{ maxMeshMultiviewViewCount_ }, meshOutputPerVertexGranularity{ meshOutputPerVertexGranularity_ }, meshOutputPerPrimitiveGranularity{ meshOutputPerPrimitiveGranularity_ }
{}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesNV( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceMeshShaderPropertiesNV( VkPhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceMeshShaderPropertiesNV( *reinterpret_cast<PhysicalDeviceMeshShaderPropertiesNV const *>( &rhs ) )
{}
PhysicalDeviceMeshShaderPropertiesNV & operator=( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceMeshShaderPropertiesNV & operator=( VkPhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceMeshShaderPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceMeshShaderPropertiesNV*>( this );
}
operator VkPhysicalDeviceMeshShaderPropertiesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceMeshShaderPropertiesNV*>( this );
}
operator VkPhysicalDeviceMeshShaderPropertiesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceMeshShaderPropertiesNV*>( this );
}
operator VkPhysicalDeviceMeshShaderPropertiesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceMeshShaderPropertiesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, maxDrawMeshTasksCount, maxTaskWorkGroupInvocations, maxTaskWorkGroupSize, maxTaskTotalMemorySize, maxTaskOutputCount, maxMeshWorkGroupInvocations, maxMeshWorkGroupSize, maxMeshTotalMemorySize, maxMeshOutputVertices, maxMeshOutputPrimitives, maxMeshMultiviewViewCount, meshOutputPerVertexGranularity, meshOutputPerPrimitiveGranularity );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceMeshShaderPropertiesNV const & ) const = default;
#else
bool operator==( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( maxDrawMeshTasksCount == rhs.maxDrawMeshTasksCount )
&& ( maxTaskWorkGroupInvocations == rhs.maxTaskWorkGroupInvocations )
&& ( maxTaskWorkGroupSize == rhs.maxTaskWorkGroupSize )
&& ( maxTaskTotalMemorySize == rhs.maxTaskTotalMemorySize )
&& ( maxTaskOutputCount == rhs.maxTaskOutputCount )
&& ( maxMeshWorkGroupInvocations == rhs.maxMeshWorkGroupInvocations )
&& ( maxMeshWorkGroupSize == rhs.maxMeshWorkGroupSize )
&& ( maxMeshTotalMemorySize == rhs.maxMeshTotalMemorySize )
&& ( maxMeshOutputVertices == rhs.maxMeshOutputVertices )
&& ( maxMeshOutputPrimitives == rhs.maxMeshOutputPrimitives )
&& ( maxMeshMultiviewViewCount == rhs.maxMeshMultiviewViewCount )
&& ( meshOutputPerVertexGranularity == rhs.meshOutputPerVertexGranularity )
&& ( meshOutputPerPrimitiveGranularity == rhs.meshOutputPerPrimitiveGranularity );
#endif
}
bool operator!=( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMeshShaderPropertiesNV;
void * pNext = {};
uint32_t maxDrawMeshTasksCount = {};
uint32_t maxTaskWorkGroupInvocations = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> maxTaskWorkGroupSize = {};
uint32_t maxTaskTotalMemorySize = {};
uint32_t maxTaskOutputCount = {};
uint32_t maxMeshWorkGroupInvocations = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> maxMeshWorkGroupSize = {};
uint32_t maxMeshTotalMemorySize = {};
uint32_t maxMeshOutputVertices = {};
uint32_t maxMeshOutputPrimitives = {};
uint32_t maxMeshMultiviewViewCount = {};
uint32_t meshOutputPerVertexGranularity = {};
uint32_t meshOutputPerPrimitiveGranularity = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceMeshShaderPropertiesNV>
{
using Type = PhysicalDeviceMeshShaderPropertiesNV;
};
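// Illustrative sketch (editorial addition): the same query can be written with
// VULKAN_HPP_NAMESPACE::StructureChain, which links the pNext chain automatically and
// checks at compile time that PhysicalDeviceMeshShaderPropertiesNV may extend
// PhysicalDeviceProperties2. `physicalDevice` is again an assumed PhysicalDevice.
//
//   auto chain = physicalDevice.getProperties2<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2,
//                                              VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV>();
//   auto const & nvProperties = chain.get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV>();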
// wrapper struct for struct VkPhysicalDeviceMultiDrawFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMultiDrawFeaturesEXT.html
struct PhysicalDeviceMultiDrawFeaturesEXT
{
using NativeType = VkPhysicalDeviceMultiDrawFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiDrawFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiDrawFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 multiDraw_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, multiDraw{ multiDraw_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiDrawFeaturesEXT( PhysicalDeviceMultiDrawFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceMultiDrawFeaturesEXT( VkPhysicalDeviceMultiDrawFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceMultiDrawFeaturesEXT( *reinterpret_cast<PhysicalDeviceMultiDrawFeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceMultiDrawFeaturesEXT & operator=( PhysicalDeviceMultiDrawFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceMultiDrawFeaturesEXT & operator=( VkPhysicalDeviceMultiDrawFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiDrawFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiDrawFeaturesEXT & setMultiDraw( VULKAN_HPP_NAMESPACE::Bool32 multiDraw_ ) VULKAN_HPP_NOEXCEPT
{
multiDraw = multiDraw_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceMultiDrawFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceMultiDrawFeaturesEXT*>( this );
}
operator VkPhysicalDeviceMultiDrawFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceMultiDrawFeaturesEXT*>( this );
}
operator VkPhysicalDeviceMultiDrawFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceMultiDrawFeaturesEXT*>( this );
}
operator VkPhysicalDeviceMultiDrawFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceMultiDrawFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, multiDraw );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceMultiDrawFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceMultiDrawFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( multiDraw == rhs.multiDraw );
#endif
}
bool operator!=( PhysicalDeviceMultiDrawFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiDrawFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 multiDraw = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceMultiDrawFeaturesEXT>
{
using Type = PhysicalDeviceMultiDrawFeaturesEXT;
};
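// Illustrative sketch (editorial addition): feature structs are used in both directions --
// chained into a getFeatures2 query to test support, and into DeviceCreateInfo::pNext to
// enable the feature at device creation. The query side, with an assumed `physicalDevice`:
//
//   VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawFeaturesEXT multiDrawFeatures;
//   VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features2;
//   features2.pNext = &multiDrawFeatures;
//   physicalDevice.getFeatures2( &features2 );
//   bool supported = ( multiDrawFeatures.multiDraw == VK_TRUE );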
// wrapper struct for struct VkPhysicalDeviceMultiDrawPropertiesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMultiDrawPropertiesEXT.html
struct PhysicalDeviceMultiDrawPropertiesEXT
{
using NativeType = VkPhysicalDeviceMultiDrawPropertiesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiDrawPropertiesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiDrawPropertiesEXT(uint32_t maxMultiDrawCount_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, maxMultiDrawCount{ maxMultiDrawCount_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiDrawPropertiesEXT( PhysicalDeviceMultiDrawPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceMultiDrawPropertiesEXT( VkPhysicalDeviceMultiDrawPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceMultiDrawPropertiesEXT( *reinterpret_cast<PhysicalDeviceMultiDrawPropertiesEXT const *>( &rhs ) )
{}
PhysicalDeviceMultiDrawPropertiesEXT & operator=( PhysicalDeviceMultiDrawPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceMultiDrawPropertiesEXT & operator=( VkPhysicalDeviceMultiDrawPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawPropertiesEXT const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceMultiDrawPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceMultiDrawPropertiesEXT*>( this );
}
operator VkPhysicalDeviceMultiDrawPropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceMultiDrawPropertiesEXT*>( this );
}
operator VkPhysicalDeviceMultiDrawPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceMultiDrawPropertiesEXT*>( this );
}
operator VkPhysicalDeviceMultiDrawPropertiesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceMultiDrawPropertiesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, maxMultiDrawCount );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceMultiDrawPropertiesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceMultiDrawPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( maxMultiDrawCount == rhs.maxMultiDrawCount );
#endif
}
bool operator!=( PhysicalDeviceMultiDrawPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiDrawPropertiesEXT;
void * pNext = {};
uint32_t maxMultiDrawCount = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceMultiDrawPropertiesEXT>
{
using Type = PhysicalDeviceMultiDrawPropertiesEXT;
};
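// Illustrative sketch (editorial addition): the conversion operators defined above let a
// wrapper struct be passed wherever the C API expects the native type, without an explicit
// cast; the layout-compatible reinterpret_cast happens inside the operator.
//
//   VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawPropertiesEXT properties;
//   VkPhysicalDeviceMultiDrawPropertiesEXT const * native = properties;  // via operator VkPhysicalDeviceMultiDrawPropertiesEXT const *()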
// wrapper struct for struct VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT.html
struct PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT
{
using NativeType = VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 multisampledRenderToSingleSampled_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, multisampledRenderToSingleSampled{ multisampledRenderToSingleSampled_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT( PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT( VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT( *reinterpret_cast<PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT & operator=( PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT & operator=( VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT & setMultisampledRenderToSingleSampled( VULKAN_HPP_NAMESPACE::Bool32 multisampledRenderToSingleSampled_ ) VULKAN_HPP_NOEXCEPT
{
multisampledRenderToSingleSampled = multisampledRenderToSingleSampled_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT*>( this );
}
operator VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT*>( this );
}
operator VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT*>( this );
}
operator VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, multisampledRenderToSingleSampled );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( multisampledRenderToSingleSampled == rhs.multisampledRenderToSingleSampled );
#endif
}
bool operator!=( PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 multisampledRenderToSingleSampled = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT>
{
using Type = PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT;
};
// wrapper struct for struct VkPhysicalDeviceMultiviewFeatures, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMultiviewFeatures.html
struct PhysicalDeviceMultiviewFeatures
{
using NativeType = VkPhysicalDeviceMultiviewFeatures;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiviewFeatures;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewFeatures(VULKAN_HPP_NAMESPACE::Bool32 multiview_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, multiview{ multiview_ }, multiviewGeometryShader{ multiviewGeometryShader_ }, multiviewTessellationShader{ multiviewTessellationShader_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewFeatures( PhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceMultiviewFeatures( VkPhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceMultiviewFeatures( *reinterpret_cast<PhysicalDeviceMultiviewFeatures const *>( &rhs ) )
{}
PhysicalDeviceMultiviewFeatures & operator=( PhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceMultiviewFeatures & operator=( VkPhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewFeatures & setMultiview( VULKAN_HPP_NAMESPACE::Bool32 multiview_ ) VULKAN_HPP_NOEXCEPT
{
multiview = multiview_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewFeatures & setMultiviewGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ ) VULKAN_HPP_NOEXCEPT
{
multiviewGeometryShader = multiviewGeometryShader_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewFeatures & setMultiviewTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ ) VULKAN_HPP_NOEXCEPT
{
multiviewTessellationShader = multiviewTessellationShader_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceMultiviewFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceMultiviewFeatures*>( this );
}
operator VkPhysicalDeviceMultiviewFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceMultiviewFeatures*>( this );
}
operator VkPhysicalDeviceMultiviewFeatures const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceMultiviewFeatures*>( this );
}
operator VkPhysicalDeviceMultiviewFeatures *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceMultiviewFeatures*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, multiview, multiviewGeometryShader, multiviewTessellationShader );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceMultiviewFeatures const & ) const = default;
#else
bool operator==( PhysicalDeviceMultiviewFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( multiview == rhs.multiview )
&& ( multiviewGeometryShader == rhs.multiviewGeometryShader )
&& ( multiviewTessellationShader == rhs.multiviewTessellationShader );
#endif
}
bool operator!=( PhysicalDeviceMultiviewFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewFeatures;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 multiview = {};
VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader = {};
VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceMultiviewFeatures>
{
using Type = PhysicalDeviceMultiviewFeatures;
};
using PhysicalDeviceMultiviewFeaturesKHR = PhysicalDeviceMultiviewFeatures;
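// Illustrative sketch (editorial addition): enabling multiview at device creation by
// chaining the feature struct into DeviceCreateInfo. The setters return *this, so they
// compose fluently; `queueCreateInfo` and `physicalDevice` are assumed to exist.
//
//   auto multiviewFeatures = VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures{}
//                              .setMultiview( VK_TRUE );
//   auto deviceCreateInfo  = VULKAN_HPP_NAMESPACE::DeviceCreateInfo{}
//                              .setQueueCreateInfos( queueCreateInfo )
//                              .setPNext( &multiviewFeatures );
//   VULKAN_HPP_NAMESPACE::Device device = physicalDevice.createDevice( deviceCreateInfo );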
// wrapper struct for struct VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX.html
struct PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX
{
using NativeType = VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX(VULKAN_HPP_NAMESPACE::Bool32 perViewPositionAllComponents_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, perViewPositionAllComponents{ perViewPositionAllComponents_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( *reinterpret_cast<PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const *>( &rhs ) )
{}
PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX & operator=( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX & operator=( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX*>( this );
}
operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX*>( this );
}
operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX*>( this );
}
operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, perViewPositionAllComponents );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & ) const = default;
#else
bool operator==( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( perViewPositionAllComponents == rhs.perViewPositionAllComponents );
#endif
}
bool operator!=( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 perViewPositionAllComponents = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX>
{
using Type = PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX;
};
// wrapper struct for struct VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM.html
struct PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM
{
using NativeType = VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM(VULKAN_HPP_NAMESPACE::Bool32 multiviewPerViewRenderAreas_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, multiviewPerViewRenderAreas{ multiviewPerViewRenderAreas_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM( PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM( VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM( *reinterpret_cast<PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM const *>( &rhs ) )
{}
PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM & operator=( PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM & operator=( VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM & setMultiviewPerViewRenderAreas( VULKAN_HPP_NAMESPACE::Bool32 multiviewPerViewRenderAreas_ ) VULKAN_HPP_NOEXCEPT
{
multiviewPerViewRenderAreas = multiviewPerViewRenderAreas_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM*>( this );
}
operator VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM*>( this );
}
operator VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM*>( this );
}
operator VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, multiviewPerViewRenderAreas );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM const & ) const = default;
#else
bool operator==( PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( multiviewPerViewRenderAreas == rhs.multiviewPerViewRenderAreas );
#endif
}
bool operator!=( PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 multiviewPerViewRenderAreas = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM>
{
using Type = PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM;
};
// wrapper struct for struct VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM.html
struct PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM
{
using NativeType = VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM(VULKAN_HPP_NAMESPACE::Bool32 multiviewPerViewViewports_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, multiviewPerViewViewports{ multiviewPerViewViewports_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM( PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM( VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM( *reinterpret_cast<PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const *>( &rhs ) )
{}
PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM & operator=( PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM & operator=( VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM & setMultiviewPerViewViewports( VULKAN_HPP_NAMESPACE::Bool32 multiviewPerViewViewports_ ) VULKAN_HPP_NOEXCEPT
{
multiviewPerViewViewports = multiviewPerViewViewports_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM*>( this );
}
operator VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM*>( this );
}
operator VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM*>( this );
}
operator VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, multiviewPerViewViewports );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const & ) const = default;
#else
bool operator==( PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( multiviewPerViewViewports == rhs.multiviewPerViewViewports );
#endif
}
bool operator!=( PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 multiviewPerViewViewports = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM>
{
using Type = PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM;
};
// wrapper struct for struct VkPhysicalDeviceMultiviewProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMultiviewProperties.html
struct PhysicalDeviceMultiviewProperties
{
using NativeType = VkPhysicalDeviceMultiviewProperties;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiviewProperties;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewProperties(uint32_t maxMultiviewViewCount_ = {}, uint32_t maxMultiviewInstanceIndex_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, maxMultiviewViewCount{ maxMultiviewViewCount_ }, maxMultiviewInstanceIndex{ maxMultiviewInstanceIndex_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewProperties( PhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceMultiviewProperties( VkPhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceMultiviewProperties( *reinterpret_cast<PhysicalDeviceMultiviewProperties const *>( &rhs ) )
{}
PhysicalDeviceMultiviewProperties & operator=( PhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceMultiviewProperties & operator=( VkPhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewProperties const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceMultiviewProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceMultiviewProperties*>( this );
}
operator VkPhysicalDeviceMultiviewProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceMultiviewProperties*>( this );
}
operator VkPhysicalDeviceMultiviewProperties const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceMultiviewProperties*>( this );
}
operator VkPhysicalDeviceMultiviewProperties *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceMultiviewProperties*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, maxMultiviewViewCount, maxMultiviewInstanceIndex );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceMultiviewProperties const & ) const = default;
#else
bool operator==( PhysicalDeviceMultiviewProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( maxMultiviewViewCount == rhs.maxMultiviewViewCount )
&& ( maxMultiviewInstanceIndex == rhs.maxMultiviewInstanceIndex );
#endif
}
bool operator!=( PhysicalDeviceMultiviewProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewProperties;
void * pNext = {};
uint32_t maxMultiviewViewCount = {};
uint32_t maxMultiviewInstanceIndex = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceMultiviewProperties>
{
using Type = PhysicalDeviceMultiviewProperties;
};
using PhysicalDeviceMultiviewPropertiesKHR = PhysicalDeviceMultiviewProperties;
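// Note (editorial addition): VK_KHR_multiview was promoted to core Vulkan 1.1; the KHR
// alias above is retained for source compatibility with pre-promotion code.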
// wrapper struct for struct VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT.html
struct PhysicalDeviceMutableDescriptorTypeFeaturesEXT
{
using NativeType = VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMutableDescriptorTypeFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceMutableDescriptorTypeFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 mutableDescriptorType_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, mutableDescriptorType{ mutableDescriptorType_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceMutableDescriptorTypeFeaturesEXT( PhysicalDeviceMutableDescriptorTypeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceMutableDescriptorTypeFeaturesEXT( VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceMutableDescriptorTypeFeaturesEXT( *reinterpret_cast<PhysicalDeviceMutableDescriptorTypeFeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceMutableDescriptorTypeFeaturesEXT & operator=( PhysicalDeviceMutableDescriptorTypeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceMutableDescriptorTypeFeaturesEXT & operator=( VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMutableDescriptorTypeFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMutableDescriptorTypeFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMutableDescriptorTypeFeaturesEXT & setMutableDescriptorType( VULKAN_HPP_NAMESPACE::Bool32 mutableDescriptorType_ ) VULKAN_HPP_NOEXCEPT
{
mutableDescriptorType = mutableDescriptorType_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT*>( this );
}
operator VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT*>( this );
}
operator VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT*>( this );
}
operator VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, mutableDescriptorType );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceMutableDescriptorTypeFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceMutableDescriptorTypeFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( mutableDescriptorType == rhs.mutableDescriptorType );
#endif
}
bool operator!=( PhysicalDeviceMutableDescriptorTypeFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMutableDescriptorTypeFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 mutableDescriptorType = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceMutableDescriptorTypeFeaturesEXT>
{
using Type = PhysicalDeviceMutableDescriptorTypeFeaturesEXT;
};
using PhysicalDeviceMutableDescriptorTypeFeaturesVALVE = PhysicalDeviceMutableDescriptorTypeFeaturesEXT;
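// Note (editorial addition): VK_VALVE_mutable_descriptor_type was promoted to
// VK_EXT_mutable_descriptor_type; the VALVE alias above is retained for source
// compatibility with code written against the original vendor extension.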
// wrapper struct for struct VkPhysicalDeviceNestedCommandBufferFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceNestedCommandBufferFeaturesEXT.html
struct PhysicalDeviceNestedCommandBufferFeaturesEXT
{
using NativeType = VkPhysicalDeviceNestedCommandBufferFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceNestedCommandBufferFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceNestedCommandBufferFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 nestedCommandBuffer_ = {}, VULKAN_HPP_NAMESPACE::Bool32 nestedCommandBufferRendering_ = {}, VULKAN_HPP_NAMESPACE::Bool32 nestedCommandBufferSimultaneousUse_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, nestedCommandBuffer{ nestedCommandBuffer_ }, nestedCommandBufferRendering{ nestedCommandBufferRendering_ }, nestedCommandBufferSimultaneousUse{ nestedCommandBufferSimultaneousUse_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceNestedCommandBufferFeaturesEXT( PhysicalDeviceNestedCommandBufferFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceNestedCommandBufferFeaturesEXT( VkPhysicalDeviceNestedCommandBufferFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceNestedCommandBufferFeaturesEXT( *reinterpret_cast<PhysicalDeviceNestedCommandBufferFeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceNestedCommandBufferFeaturesEXT & operator=( PhysicalDeviceNestedCommandBufferFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceNestedCommandBufferFeaturesEXT & operator=( VkPhysicalDeviceNestedCommandBufferFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceNestedCommandBufferFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceNestedCommandBufferFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceNestedCommandBufferFeaturesEXT & setNestedCommandBuffer( VULKAN_HPP_NAMESPACE::Bool32 nestedCommandBuffer_ ) VULKAN_HPP_NOEXCEPT
{
nestedCommandBuffer = nestedCommandBuffer_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceNestedCommandBufferFeaturesEXT & setNestedCommandBufferRendering( VULKAN_HPP_NAMESPACE::Bool32 nestedCommandBufferRendering_ ) VULKAN_HPP_NOEXCEPT
{
nestedCommandBufferRendering = nestedCommandBufferRendering_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceNestedCommandBufferFeaturesEXT & setNestedCommandBufferSimultaneousUse( VULKAN_HPP_NAMESPACE::Bool32 nestedCommandBufferSimultaneousUse_ ) VULKAN_HPP_NOEXCEPT
{
nestedCommandBufferSimultaneousUse = nestedCommandBufferSimultaneousUse_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceNestedCommandBufferFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceNestedCommandBufferFeaturesEXT*>( this );
}
operator VkPhysicalDeviceNestedCommandBufferFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceNestedCommandBufferFeaturesEXT*>( this );
}
operator VkPhysicalDeviceNestedCommandBufferFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceNestedCommandBufferFeaturesEXT*>( this );
}
operator VkPhysicalDeviceNestedCommandBufferFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceNestedCommandBufferFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, nestedCommandBuffer, nestedCommandBufferRendering, nestedCommandBufferSimultaneousUse );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceNestedCommandBufferFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceNestedCommandBufferFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( nestedCommandBuffer == rhs.nestedCommandBuffer )
&& ( nestedCommandBufferRendering == rhs.nestedCommandBufferRendering )
&& ( nestedCommandBufferSimultaneousUse == rhs.nestedCommandBufferSimultaneousUse );
#endif
}
bool operator!=( PhysicalDeviceNestedCommandBufferFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceNestedCommandBufferFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 nestedCommandBuffer = {};
VULKAN_HPP_NAMESPACE::Bool32 nestedCommandBufferRendering = {};
VULKAN_HPP_NAMESPACE::Bool32 nestedCommandBufferSimultaneousUse = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceNestedCommandBufferFeaturesEXT>
{
using Type = PhysicalDeviceNestedCommandBufferFeaturesEXT;
};
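// Illustrative sketch (editorial addition): because the wrapper defines operator== (or
// operator<=> where available), feature snapshots can be compared directly, e.g. to check
// whether two physical devices report identical nested-command-buffer support:
//
//   VULKAN_HPP_NAMESPACE::PhysicalDeviceNestedCommandBufferFeaturesEXT a, b;
//   // ... fill a and b via getFeatures2 on the two devices ...
//   bool sameSupport = ( a == b );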
// wrapper struct for struct VkPhysicalDeviceNestedCommandBufferPropertiesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceNestedCommandBufferPropertiesEXT.html
struct PhysicalDeviceNestedCommandBufferPropertiesEXT
{
using NativeType = VkPhysicalDeviceNestedCommandBufferPropertiesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceNestedCommandBufferPropertiesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceNestedCommandBufferPropertiesEXT(uint32_t maxCommandBufferNestingLevel_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, maxCommandBufferNestingLevel{ maxCommandBufferNestingLevel_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceNestedCommandBufferPropertiesEXT( PhysicalDeviceNestedCommandBufferPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceNestedCommandBufferPropertiesEXT( VkPhysicalDeviceNestedCommandBufferPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceNestedCommandBufferPropertiesEXT( *reinterpret_cast<PhysicalDeviceNestedCommandBufferPropertiesEXT const *>( &rhs ) )
{}
PhysicalDeviceNestedCommandBufferPropertiesEXT & operator=( PhysicalDeviceNestedCommandBufferPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceNestedCommandBufferPropertiesEXT & operator=( VkPhysicalDeviceNestedCommandBufferPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceNestedCommandBufferPropertiesEXT const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceNestedCommandBufferPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceNestedCommandBufferPropertiesEXT*>( this );
}
operator VkPhysicalDeviceNestedCommandBufferPropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceNestedCommandBufferPropertiesEXT*>( this );
}
operator VkPhysicalDeviceNestedCommandBufferPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceNestedCommandBufferPropertiesEXT*>( this );
}
operator VkPhysicalDeviceNestedCommandBufferPropertiesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceNestedCommandBufferPropertiesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, maxCommandBufferNestingLevel );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceNestedCommandBufferPropertiesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceNestedCommandBufferPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( maxCommandBufferNestingLevel == rhs.maxCommandBufferNestingLevel );
#endif
}
bool operator!=( PhysicalDeviceNestedCommandBufferPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceNestedCommandBufferPropertiesEXT;
void * pNext = {};
uint32_t maxCommandBufferNestingLevel = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceNestedCommandBufferPropertiesEXT>
{
using Type = PhysicalDeviceNestedCommandBufferPropertiesEXT;
};
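// Usage sketch (illustrative only, not part of the generated header): on a Vulkan >= 1.1
// instance, a property struct like the one above is typically read through a StructureChain,
// where 'physicalDevice' is an assumed vk::PhysicalDevice handle and 'vk' is the default
// VULKAN_HPP_NAMESPACE:
//   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
//                                              vk::PhysicalDeviceNestedCommandBufferPropertiesEXT>();
//   uint32_t maxNesting =
//     chain.get<vk::PhysicalDeviceNestedCommandBufferPropertiesEXT>().maxCommandBufferNestingLevel;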
// wrapper struct for struct VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT.html
struct PhysicalDeviceNonSeamlessCubeMapFeaturesEXT
{
using NativeType = VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceNonSeamlessCubeMapFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceNonSeamlessCubeMapFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 nonSeamlessCubeMap_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, nonSeamlessCubeMap{ nonSeamlessCubeMap_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceNonSeamlessCubeMapFeaturesEXT( PhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceNonSeamlessCubeMapFeaturesEXT( VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceNonSeamlessCubeMapFeaturesEXT( *reinterpret_cast<PhysicalDeviceNonSeamlessCubeMapFeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceNonSeamlessCubeMapFeaturesEXT & operator=( PhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceNonSeamlessCubeMapFeaturesEXT & operator=( VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceNonSeamlessCubeMapFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceNonSeamlessCubeMapFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceNonSeamlessCubeMapFeaturesEXT & setNonSeamlessCubeMap( VULKAN_HPP_NAMESPACE::Bool32 nonSeamlessCubeMap_ ) VULKAN_HPP_NOEXCEPT
{
nonSeamlessCubeMap = nonSeamlessCubeMap_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT*>( this );
}
operator VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT*>( this );
}
operator VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT*>( this );
}
operator VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, nonSeamlessCubeMap );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( nonSeamlessCubeMap == rhs.nonSeamlessCubeMap );
#endif
}
bool operator!=( PhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceNonSeamlessCubeMapFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 nonSeamlessCubeMap = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceNonSeamlessCubeMapFeaturesEXT>
{
using Type = PhysicalDeviceNonSeamlessCubeMapFeaturesEXT;
};
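// Usage sketch (illustrative only): feature structs like the one above are enabled by
// linking them into the DeviceCreateInfo pNext chain, assuming the corresponding
// VK_EXT_non_seamless_cube_map extension was requested:
//   vk::PhysicalDeviceNonSeamlessCubeMapFeaturesEXT cubeMapFeatures{ VK_TRUE };
//   vk::DeviceCreateInfo deviceCreateInfo{};
//   deviceCreateInfo.pNext = &cubeMapFeatures;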
// wrapper struct for struct VkPhysicalDeviceOpacityMicromapFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceOpacityMicromapFeaturesEXT.html
struct PhysicalDeviceOpacityMicromapFeaturesEXT
{
using NativeType = VkPhysicalDeviceOpacityMicromapFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceOpacityMicromapFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceOpacityMicromapFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 micromap_ = {}, VULKAN_HPP_NAMESPACE::Bool32 micromapCaptureReplay_ = {}, VULKAN_HPP_NAMESPACE::Bool32 micromapHostCommands_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, micromap{ micromap_ }, micromapCaptureReplay{ micromapCaptureReplay_ }, micromapHostCommands{ micromapHostCommands_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceOpacityMicromapFeaturesEXT( PhysicalDeviceOpacityMicromapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceOpacityMicromapFeaturesEXT( VkPhysicalDeviceOpacityMicromapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceOpacityMicromapFeaturesEXT( *reinterpret_cast<PhysicalDeviceOpacityMicromapFeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceOpacityMicromapFeaturesEXT & operator=( PhysicalDeviceOpacityMicromapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceOpacityMicromapFeaturesEXT & operator=( VkPhysicalDeviceOpacityMicromapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceOpacityMicromapFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceOpacityMicromapFeaturesEXT & setMicromap( VULKAN_HPP_NAMESPACE::Bool32 micromap_ ) VULKAN_HPP_NOEXCEPT
{
micromap = micromap_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceOpacityMicromapFeaturesEXT & setMicromapCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 micromapCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
{
micromapCaptureReplay = micromapCaptureReplay_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceOpacityMicromapFeaturesEXT & setMicromapHostCommands( VULKAN_HPP_NAMESPACE::Bool32 micromapHostCommands_ ) VULKAN_HPP_NOEXCEPT
{
micromapHostCommands = micromapHostCommands_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceOpacityMicromapFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceOpacityMicromapFeaturesEXT*>( this );
}
operator VkPhysicalDeviceOpacityMicromapFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceOpacityMicromapFeaturesEXT*>( this );
}
operator VkPhysicalDeviceOpacityMicromapFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceOpacityMicromapFeaturesEXT*>( this );
}
operator VkPhysicalDeviceOpacityMicromapFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceOpacityMicromapFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, micromap, micromapCaptureReplay, micromapHostCommands );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceOpacityMicromapFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceOpacityMicromapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( micromap == rhs.micromap )
&& ( micromapCaptureReplay == rhs.micromapCaptureReplay )
&& ( micromapHostCommands == rhs.micromapHostCommands );
#endif
}
bool operator!=( PhysicalDeviceOpacityMicromapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceOpacityMicromapFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 micromap = {};
VULKAN_HPP_NAMESPACE::Bool32 micromapCaptureReplay = {};
VULKAN_HPP_NAMESPACE::Bool32 micromapHostCommands = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceOpacityMicromapFeaturesEXT>
{
using Type = PhysicalDeviceOpacityMicromapFeaturesEXT;
};
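// Usage sketch (illustrative only): the VULKAN_HPP_CONSTEXPR_14 setters above return a
// reference to the struct, so individual feature bits can be toggled in a fluent chain
// before the struct joins a device-creation pNext chain:
//   vk::PhysicalDeviceOpacityMicromapFeaturesEXT micromapFeatures{};
//   micromapFeatures.setMicromap( VK_TRUE ).setMicromapHostCommands( VK_TRUE );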
// wrapper struct for struct VkPhysicalDeviceOpacityMicromapPropertiesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceOpacityMicromapPropertiesEXT.html
struct PhysicalDeviceOpacityMicromapPropertiesEXT
{
using NativeType = VkPhysicalDeviceOpacityMicromapPropertiesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceOpacityMicromapPropertiesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceOpacityMicromapPropertiesEXT(uint32_t maxOpacity2StateSubdivisionLevel_ = {}, uint32_t maxOpacity4StateSubdivisionLevel_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, maxOpacity2StateSubdivisionLevel{ maxOpacity2StateSubdivisionLevel_ }, maxOpacity4StateSubdivisionLevel{ maxOpacity4StateSubdivisionLevel_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceOpacityMicromapPropertiesEXT( PhysicalDeviceOpacityMicromapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceOpacityMicromapPropertiesEXT( VkPhysicalDeviceOpacityMicromapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceOpacityMicromapPropertiesEXT( *reinterpret_cast<PhysicalDeviceOpacityMicromapPropertiesEXT const *>( &rhs ) )
{}
PhysicalDeviceOpacityMicromapPropertiesEXT & operator=( PhysicalDeviceOpacityMicromapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceOpacityMicromapPropertiesEXT & operator=( VkPhysicalDeviceOpacityMicromapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapPropertiesEXT const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceOpacityMicromapPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceOpacityMicromapPropertiesEXT*>( this );
}
operator VkPhysicalDeviceOpacityMicromapPropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceOpacityMicromapPropertiesEXT*>( this );
}
operator VkPhysicalDeviceOpacityMicromapPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceOpacityMicromapPropertiesEXT*>( this );
}
operator VkPhysicalDeviceOpacityMicromapPropertiesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceOpacityMicromapPropertiesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, maxOpacity2StateSubdivisionLevel, maxOpacity4StateSubdivisionLevel );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceOpacityMicromapPropertiesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceOpacityMicromapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( maxOpacity2StateSubdivisionLevel == rhs.maxOpacity2StateSubdivisionLevel )
&& ( maxOpacity4StateSubdivisionLevel == rhs.maxOpacity4StateSubdivisionLevel );
#endif
}
bool operator!=( PhysicalDeviceOpacityMicromapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceOpacityMicromapPropertiesEXT;
void * pNext = {};
uint32_t maxOpacity2StateSubdivisionLevel = {};
uint32_t maxOpacity4StateSubdivisionLevel = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceOpacityMicromapPropertiesEXT>
{
using Type = PhysicalDeviceOpacityMicromapPropertiesEXT;
};
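// Usage sketch (illustrative only): the two limits above bound the subdivision levels
// accepted for 2-state and 4-state opacity micromaps, and are read via a chained
// properties query ('physicalDevice' assumed as before):
//   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
//                                              vk::PhysicalDeviceOpacityMicromapPropertiesEXT>();
//   auto const & omProps = chain.get<vk::PhysicalDeviceOpacityMicromapPropertiesEXT>();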
// wrapper struct for struct VkPhysicalDeviceOpticalFlowFeaturesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceOpticalFlowFeaturesNV.html
struct PhysicalDeviceOpticalFlowFeaturesNV
{
using NativeType = VkPhysicalDeviceOpticalFlowFeaturesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceOpticalFlowFeaturesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceOpticalFlowFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 opticalFlow_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, opticalFlow{ opticalFlow_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceOpticalFlowFeaturesNV( PhysicalDeviceOpticalFlowFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceOpticalFlowFeaturesNV( VkPhysicalDeviceOpticalFlowFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceOpticalFlowFeaturesNV( *reinterpret_cast<PhysicalDeviceOpticalFlowFeaturesNV const *>( &rhs ) )
{}
PhysicalDeviceOpticalFlowFeaturesNV & operator=( PhysicalDeviceOpticalFlowFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceOpticalFlowFeaturesNV & operator=( VkPhysicalDeviceOpticalFlowFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowFeaturesNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceOpticalFlowFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceOpticalFlowFeaturesNV & setOpticalFlow( VULKAN_HPP_NAMESPACE::Bool32 opticalFlow_ ) VULKAN_HPP_NOEXCEPT
{
opticalFlow = opticalFlow_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceOpticalFlowFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceOpticalFlowFeaturesNV*>( this );
}
operator VkPhysicalDeviceOpticalFlowFeaturesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceOpticalFlowFeaturesNV*>( this );
}
operator VkPhysicalDeviceOpticalFlowFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceOpticalFlowFeaturesNV*>( this );
}
operator VkPhysicalDeviceOpticalFlowFeaturesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceOpticalFlowFeaturesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, opticalFlow );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceOpticalFlowFeaturesNV const & ) const = default;
#else
bool operator==( PhysicalDeviceOpticalFlowFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( opticalFlow == rhs.opticalFlow );
#endif
}
bool operator!=( PhysicalDeviceOpticalFlowFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceOpticalFlowFeaturesNV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 opticalFlow = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceOpticalFlowFeaturesNV>
{
using Type = PhysicalDeviceOpticalFlowFeaturesNV;
};
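// Usage sketch (illustrative only): feature support is queried before device creation:
//   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
//                                            vk::PhysicalDeviceOpticalFlowFeaturesNV>();
//   bool hasOpticalFlow = chain.get<vk::PhysicalDeviceOpticalFlowFeaturesNV>().opticalFlow == VK_TRUE;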
// wrapper struct for struct VkPhysicalDeviceOpticalFlowPropertiesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceOpticalFlowPropertiesNV.html
struct PhysicalDeviceOpticalFlowPropertiesNV
{
using NativeType = VkPhysicalDeviceOpticalFlowPropertiesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceOpticalFlowPropertiesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceOpticalFlowPropertiesNV(VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV supportedOutputGridSizes_ = {}, VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV supportedHintGridSizes_ = {}, VULKAN_HPP_NAMESPACE::Bool32 hintSupported_ = {}, VULKAN_HPP_NAMESPACE::Bool32 costSupported_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bidirectionalFlowSupported_ = {}, VULKAN_HPP_NAMESPACE::Bool32 globalFlowSupported_ = {}, uint32_t minWidth_ = {}, uint32_t minHeight_ = {}, uint32_t maxWidth_ = {}, uint32_t maxHeight_ = {}, uint32_t maxNumRegionsOfInterest_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, supportedOutputGridSizes{ supportedOutputGridSizes_ }, supportedHintGridSizes{ supportedHintGridSizes_ }, hintSupported{ hintSupported_ }, costSupported{ costSupported_ }, bidirectionalFlowSupported{ bidirectionalFlowSupported_ }, globalFlowSupported{ globalFlowSupported_ }, minWidth{ minWidth_ }, minHeight{ minHeight_ }, maxWidth{ maxWidth_ }, maxHeight{ maxHeight_ }, maxNumRegionsOfInterest{ maxNumRegionsOfInterest_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceOpticalFlowPropertiesNV( PhysicalDeviceOpticalFlowPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceOpticalFlowPropertiesNV( VkPhysicalDeviceOpticalFlowPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceOpticalFlowPropertiesNV( *reinterpret_cast<PhysicalDeviceOpticalFlowPropertiesNV const *>( &rhs ) )
{}
PhysicalDeviceOpticalFlowPropertiesNV & operator=( PhysicalDeviceOpticalFlowPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceOpticalFlowPropertiesNV & operator=( VkPhysicalDeviceOpticalFlowPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowPropertiesNV const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceOpticalFlowPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceOpticalFlowPropertiesNV*>( this );
}
operator VkPhysicalDeviceOpticalFlowPropertiesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceOpticalFlowPropertiesNV*>( this );
}
operator VkPhysicalDeviceOpticalFlowPropertiesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceOpticalFlowPropertiesNV*>( this );
}
operator VkPhysicalDeviceOpticalFlowPropertiesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceOpticalFlowPropertiesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV const &, VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, supportedOutputGridSizes, supportedHintGridSizes, hintSupported, costSupported, bidirectionalFlowSupported, globalFlowSupported, minWidth, minHeight, maxWidth, maxHeight, maxNumRegionsOfInterest );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceOpticalFlowPropertiesNV const & ) const = default;
#else
bool operator==( PhysicalDeviceOpticalFlowPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( supportedOutputGridSizes == rhs.supportedOutputGridSizes )
&& ( supportedHintGridSizes == rhs.supportedHintGridSizes )
&& ( hintSupported == rhs.hintSupported )
&& ( costSupported == rhs.costSupported )
&& ( bidirectionalFlowSupported == rhs.bidirectionalFlowSupported )
&& ( globalFlowSupported == rhs.globalFlowSupported )
&& ( minWidth == rhs.minWidth )
&& ( minHeight == rhs.minHeight )
&& ( maxWidth == rhs.maxWidth )
&& ( maxHeight == rhs.maxHeight )
&& ( maxNumRegionsOfInterest == rhs.maxNumRegionsOfInterest );
#endif
}
bool operator!=( PhysicalDeviceOpticalFlowPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceOpticalFlowPropertiesNV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV supportedOutputGridSizes = {};
VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV supportedHintGridSizes = {};
VULKAN_HPP_NAMESPACE::Bool32 hintSupported = {};
VULKAN_HPP_NAMESPACE::Bool32 costSupported = {};
VULKAN_HPP_NAMESPACE::Bool32 bidirectionalFlowSupported = {};
VULKAN_HPP_NAMESPACE::Bool32 globalFlowSupported = {};
uint32_t minWidth = {};
uint32_t minHeight = {};
uint32_t maxWidth = {};
uint32_t maxHeight = {};
uint32_t maxNumRegionsOfInterest = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceOpticalFlowPropertiesNV>
{
using Type = PhysicalDeviceOpticalFlowPropertiesNV;
};
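// Usage sketch (illustrative only): a requested optical-flow extent should be validated
// against the reported limits; 'width' and 'height' are assumed application values:
//   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
//                                              vk::PhysicalDeviceOpticalFlowPropertiesNV>();
//   auto const & ofProps = chain.get<vk::PhysicalDeviceOpticalFlowPropertiesNV>();
//   bool extentOk = ( width >= ofProps.minWidth ) && ( width <= ofProps.maxWidth )
//                && ( height >= ofProps.minHeight ) && ( height <= ofProps.maxHeight );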
// wrapper struct for struct VkPhysicalDevicePCIBusInfoPropertiesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePCIBusInfoPropertiesEXT.html
struct PhysicalDevicePCIBusInfoPropertiesEXT
{
using NativeType = VkPhysicalDevicePCIBusInfoPropertiesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePciBusInfoPropertiesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDevicePCIBusInfoPropertiesEXT(uint32_t pciDomain_ = {}, uint32_t pciBus_ = {}, uint32_t pciDevice_ = {}, uint32_t pciFunction_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pciDomain{ pciDomain_ }, pciBus{ pciBus_ }, pciDevice{ pciDevice_ }, pciFunction{ pciFunction_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDevicePCIBusInfoPropertiesEXT( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDevicePCIBusInfoPropertiesEXT( VkPhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDevicePCIBusInfoPropertiesEXT( *reinterpret_cast<PhysicalDevicePCIBusInfoPropertiesEXT const *>( &rhs ) )
{}
PhysicalDevicePCIBusInfoPropertiesEXT & operator=( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDevicePCIBusInfoPropertiesEXT & operator=( VkPhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT const *>( &rhs );
return *this;
}
operator VkPhysicalDevicePCIBusInfoPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDevicePCIBusInfoPropertiesEXT*>( this );
}
operator VkPhysicalDevicePCIBusInfoPropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDevicePCIBusInfoPropertiesEXT*>( this );
}
operator VkPhysicalDevicePCIBusInfoPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDevicePCIBusInfoPropertiesEXT*>( this );
}
operator VkPhysicalDevicePCIBusInfoPropertiesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDevicePCIBusInfoPropertiesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pciDomain, pciBus, pciDevice, pciFunction );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDevicePCIBusInfoPropertiesEXT const & ) const = default;
#else
bool operator==( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pciDomain == rhs.pciDomain )
&& ( pciBus == rhs.pciBus )
&& ( pciDevice == rhs.pciDevice )
&& ( pciFunction == rhs.pciFunction );
#endif
}
bool operator!=( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePciBusInfoPropertiesEXT;
void * pNext = {};
uint32_t pciDomain = {};
uint32_t pciBus = {};
uint32_t pciDevice = {};
uint32_t pciFunction = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDevicePciBusInfoPropertiesEXT>
{
using Type = PhysicalDevicePCIBusInfoPropertiesEXT;
};
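// Usage sketch (illustrative only): the four fields above form the conventional PCI
// domain:bus:device.function address; a simple dump ('chain' is an assumed properties
// query as in the sketches above, std::printf requires <cstdio>, and PRIx32 from
// <cinttypes> would be the strictly portable format choice for uint32_t):
//   auto const & pci = chain.get<vk::PhysicalDevicePCIBusInfoPropertiesEXT>();
//   std::printf( "%04x:%02x:%02x.%x\n", pci.pciDomain, pci.pciBus, pci.pciDevice, pci.pciFunction );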
// wrapper struct for struct VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT.html
struct PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT
{
using NativeType = VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePageableDeviceLocalMemoryFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 pageableDeviceLocalMemory_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pageableDeviceLocalMemory{ pageableDeviceLocalMemory_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT( PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT( VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT( *reinterpret_cast<PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const *>( &rhs ) )
{}
PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT & operator=( PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT & operator=( VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT & setPageableDeviceLocalMemory( VULKAN_HPP_NAMESPACE::Bool32 pageableDeviceLocalMemory_ ) VULKAN_HPP_NOEXCEPT
{
pageableDeviceLocalMemory = pageableDeviceLocalMemory_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT*>( this );
}
operator VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT*>( this );
}
operator VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT*>( this );
}
operator VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pageableDeviceLocalMemory );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pageableDeviceLocalMemory == rhs.pageableDeviceLocalMemory );
#endif
}
bool operator!=( PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePageableDeviceLocalMemoryFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 pageableDeviceLocalMemory = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDevicePageableDeviceLocalMemoryFeaturesEXT>
{
using Type = PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT;
};
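// Usage sketch (illustrative only): this feature must be enabled at device creation for
// memory-priority hints (vkSetDeviceMemoryPriorityEXT) to influence paging; the
// 'deviceCreateInfo' below is an assumed, partially filled DeviceCreateInfo:
//   vk::PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT pageableFeatures{ VK_TRUE };
//   deviceCreateInfo.pNext = &pageableFeatures;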
// wrapper struct for struct VkPhysicalDevicePartitionedAccelerationStructureFeaturesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePartitionedAccelerationStructureFeaturesNV.html
struct PhysicalDevicePartitionedAccelerationStructureFeaturesNV
{
using NativeType = VkPhysicalDevicePartitionedAccelerationStructureFeaturesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePartitionedAccelerationStructureFeaturesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDevicePartitionedAccelerationStructureFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 partitionedAccelerationStructure_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, partitionedAccelerationStructure{ partitionedAccelerationStructure_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDevicePartitionedAccelerationStructureFeaturesNV( PhysicalDevicePartitionedAccelerationStructureFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDevicePartitionedAccelerationStructureFeaturesNV( VkPhysicalDevicePartitionedAccelerationStructureFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDevicePartitionedAccelerationStructureFeaturesNV( *reinterpret_cast<PhysicalDevicePartitionedAccelerationStructureFeaturesNV const *>( &rhs ) )
{}
PhysicalDevicePartitionedAccelerationStructureFeaturesNV & operator=( PhysicalDevicePartitionedAccelerationStructureFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDevicePartitionedAccelerationStructureFeaturesNV & operator=( VkPhysicalDevicePartitionedAccelerationStructureFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePartitionedAccelerationStructureFeaturesNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePartitionedAccelerationStructureFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePartitionedAccelerationStructureFeaturesNV & setPartitionedAccelerationStructure( VULKAN_HPP_NAMESPACE::Bool32 partitionedAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT
{
partitionedAccelerationStructure = partitionedAccelerationStructure_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDevicePartitionedAccelerationStructureFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDevicePartitionedAccelerationStructureFeaturesNV*>( this );
}
operator VkPhysicalDevicePartitionedAccelerationStructureFeaturesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDevicePartitionedAccelerationStructureFeaturesNV*>( this );
}
operator VkPhysicalDevicePartitionedAccelerationStructureFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDevicePartitionedAccelerationStructureFeaturesNV*>( this );
}
operator VkPhysicalDevicePartitionedAccelerationStructureFeaturesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDevicePartitionedAccelerationStructureFeaturesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, partitionedAccelerationStructure );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDevicePartitionedAccelerationStructureFeaturesNV const & ) const = default;
#else
bool operator==( PhysicalDevicePartitionedAccelerationStructureFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( partitionedAccelerationStructure == rhs.partitionedAccelerationStructure );
#endif
}
bool operator!=( PhysicalDevicePartitionedAccelerationStructureFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePartitionedAccelerationStructureFeaturesNV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 partitionedAccelerationStructure = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDevicePartitionedAccelerationStructureFeaturesNV>
{
using Type = PhysicalDevicePartitionedAccelerationStructureFeaturesNV;
};
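// Usage sketch (illustrative only): the single feature bit is enabled the same way,
// chained into an assumed 'deviceCreateInfo':
//   vk::PhysicalDevicePartitionedAccelerationStructureFeaturesNV ptlasFeatures{ VK_TRUE };
//   deviceCreateInfo.pNext = &ptlasFeatures;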
// wrapper struct for struct VkPhysicalDevicePartitionedAccelerationStructurePropertiesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePartitionedAccelerationStructurePropertiesNV.html
struct PhysicalDevicePartitionedAccelerationStructurePropertiesNV
{
using NativeType = VkPhysicalDevicePartitionedAccelerationStructurePropertiesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePartitionedAccelerationStructurePropertiesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDevicePartitionedAccelerationStructurePropertiesNV(uint32_t maxPartitionCount_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, maxPartitionCount{ maxPartitionCount_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDevicePartitionedAccelerationStructurePropertiesNV( PhysicalDevicePartitionedAccelerationStructurePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDevicePartitionedAccelerationStructurePropertiesNV( VkPhysicalDevicePartitionedAccelerationStructurePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDevicePartitionedAccelerationStructurePropertiesNV( *reinterpret_cast<PhysicalDevicePartitionedAccelerationStructurePropertiesNV const *>( &rhs ) )
{}
PhysicalDevicePartitionedAccelerationStructurePropertiesNV & operator=( PhysicalDevicePartitionedAccelerationStructurePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDevicePartitionedAccelerationStructurePropertiesNV & operator=( VkPhysicalDevicePartitionedAccelerationStructurePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePartitionedAccelerationStructurePropertiesNV const *>( &rhs );
return *this;
}
operator VkPhysicalDevicePartitionedAccelerationStructurePropertiesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDevicePartitionedAccelerationStructurePropertiesNV*>( this );
}
operator VkPhysicalDevicePartitionedAccelerationStructurePropertiesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDevicePartitionedAccelerationStructurePropertiesNV*>( this );
}
operator VkPhysicalDevicePartitionedAccelerationStructurePropertiesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDevicePartitionedAccelerationStructurePropertiesNV*>( this );
}
operator VkPhysicalDevicePartitionedAccelerationStructurePropertiesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDevicePartitionedAccelerationStructurePropertiesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, maxPartitionCount );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDevicePartitionedAccelerationStructurePropertiesNV const & ) const = default;
#else
bool operator==( PhysicalDevicePartitionedAccelerationStructurePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( maxPartitionCount == rhs.maxPartitionCount );
#endif
}
bool operator!=( PhysicalDevicePartitionedAccelerationStructurePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePartitionedAccelerationStructurePropertiesNV;
void * pNext = {};
uint32_t maxPartitionCount = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDevicePartitionedAccelerationStructurePropertiesNV>
{
using Type = PhysicalDevicePartitionedAccelerationStructurePropertiesNV;
};
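// Usage sketch (illustrative only): partition counts passed when building a partitioned
// acceleration structure must stay within the reported limit; 'chain' and
// 'requestedPartitionCount' are assumed application state (assert requires <cassert>):
//   auto const & ptlasProps = chain.get<vk::PhysicalDevicePartitionedAccelerationStructurePropertiesNV>();
//   assert( requestedPartitionCount <= ptlasProps.maxPartitionCount );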
// wrapper struct for struct VkPhysicalDevicePerStageDescriptorSetFeaturesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePerStageDescriptorSetFeaturesNV.html
struct PhysicalDevicePerStageDescriptorSetFeaturesNV
{
using NativeType = VkPhysicalDevicePerStageDescriptorSetFeaturesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePerStageDescriptorSetFeaturesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDevicePerStageDescriptorSetFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 perStageDescriptorSet_ = {}, VULKAN_HPP_NAMESPACE::Bool32 dynamicPipelineLayout_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, perStageDescriptorSet{ perStageDescriptorSet_ }, dynamicPipelineLayout{ dynamicPipelineLayout_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDevicePerStageDescriptorSetFeaturesNV( PhysicalDevicePerStageDescriptorSetFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDevicePerStageDescriptorSetFeaturesNV( VkPhysicalDevicePerStageDescriptorSetFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDevicePerStageDescriptorSetFeaturesNV( *reinterpret_cast<PhysicalDevicePerStageDescriptorSetFeaturesNV const *>( &rhs ) )
{}
PhysicalDevicePerStageDescriptorSetFeaturesNV & operator=( PhysicalDevicePerStageDescriptorSetFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDevicePerStageDescriptorSetFeaturesNV & operator=( VkPhysicalDevicePerStageDescriptorSetFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePerStageDescriptorSetFeaturesNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerStageDescriptorSetFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerStageDescriptorSetFeaturesNV & setPerStageDescriptorSet( VULKAN_HPP_NAMESPACE::Bool32 perStageDescriptorSet_ ) VULKAN_HPP_NOEXCEPT
{
perStageDescriptorSet = perStageDescriptorSet_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerStageDescriptorSetFeaturesNV & setDynamicPipelineLayout( VULKAN_HPP_NAMESPACE::Bool32 dynamicPipelineLayout_ ) VULKAN_HPP_NOEXCEPT
{
dynamicPipelineLayout = dynamicPipelineLayout_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDevicePerStageDescriptorSetFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDevicePerStageDescriptorSetFeaturesNV*>( this );
}
operator VkPhysicalDevicePerStageDescriptorSetFeaturesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDevicePerStageDescriptorSetFeaturesNV*>( this );
}
operator VkPhysicalDevicePerStageDescriptorSetFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDevicePerStageDescriptorSetFeaturesNV*>( this );
}
operator VkPhysicalDevicePerStageDescriptorSetFeaturesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDevicePerStageDescriptorSetFeaturesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, perStageDescriptorSet, dynamicPipelineLayout );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDevicePerStageDescriptorSetFeaturesNV const & ) const = default;
#else
bool operator==( PhysicalDevicePerStageDescriptorSetFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( perStageDescriptorSet == rhs.perStageDescriptorSet )
&& ( dynamicPipelineLayout == rhs.dynamicPipelineLayout );
#endif
}
bool operator!=( PhysicalDevicePerStageDescriptorSetFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePerStageDescriptorSetFeaturesNV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 perStageDescriptorSet = {};
VULKAN_HPP_NAMESPACE::Bool32 dynamicPipelineLayout = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDevicePerStageDescriptorSetFeaturesNV>
{
using Type = PhysicalDevicePerStageDescriptorSetFeaturesNV;
};
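// Usage sketch (illustrative only): both members are independent feature bits and can be
// read through one features query:
//   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
//                                            vk::PhysicalDevicePerStageDescriptorSetFeaturesNV>();
//   auto const & psds = chain.get<vk::PhysicalDevicePerStageDescriptorSetFeaturesNV>();
//   bool canUsePerStageSets = psds.perStageDescriptorSet && psds.dynamicPipelineLayout;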
// wrapper struct for struct VkPhysicalDevicePerformanceQueryFeaturesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePerformanceQueryFeaturesKHR.html
struct PhysicalDevicePerformanceQueryFeaturesKHR
{
using NativeType = VkPhysicalDevicePerformanceQueryFeaturesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools_ = {}, VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, performanceCounterQueryPools{ performanceCounterQueryPools_ }, performanceCounterMultipleQueryPools{ performanceCounterMultipleQueryPools_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryFeaturesKHR( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDevicePerformanceQueryFeaturesKHR( VkPhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDevicePerformanceQueryFeaturesKHR( *reinterpret_cast<PhysicalDevicePerformanceQueryFeaturesKHR const *>( &rhs ) )
{}
PhysicalDevicePerformanceQueryFeaturesKHR & operator=( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDevicePerformanceQueryFeaturesKHR & operator=( VkPhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerformanceQueryFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerformanceQueryFeaturesKHR & setPerformanceCounterQueryPools( VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools_ ) VULKAN_HPP_NOEXCEPT
{
performanceCounterQueryPools = performanceCounterQueryPools_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerformanceQueryFeaturesKHR & setPerformanceCounterMultipleQueryPools( VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools_ ) VULKAN_HPP_NOEXCEPT
{
performanceCounterMultipleQueryPools = performanceCounterMultipleQueryPools_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDevicePerformanceQueryFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDevicePerformanceQueryFeaturesKHR*>( this );
}
operator VkPhysicalDevicePerformanceQueryFeaturesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDevicePerformanceQueryFeaturesKHR*>( this );
}
operator VkPhysicalDevicePerformanceQueryFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDevicePerformanceQueryFeaturesKHR*>( this );
}
operator VkPhysicalDevicePerformanceQueryFeaturesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDevicePerformanceQueryFeaturesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, performanceCounterQueryPools, performanceCounterMultipleQueryPools );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDevicePerformanceQueryFeaturesKHR const & ) const = default;
#else
bool operator==( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( performanceCounterQueryPools == rhs.performanceCounterQueryPools )
&& ( performanceCounterMultipleQueryPools == rhs.performanceCounterMultipleQueryPools );
#endif
}
bool operator!=( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools = {};
VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR>
{
using Type = PhysicalDevicePerformanceQueryFeaturesKHR;
};
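// Usage sketch (illustrative only): performanceCounterQueryPools gates creating query
// pools of type VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR; enable it via the setter and chain
// the struct into an assumed 'deviceCreateInfo':
//   vk::PhysicalDevicePerformanceQueryFeaturesKHR perfFeatures{};
//   perfFeatures.setPerformanceCounterQueryPools( VK_TRUE );
//   deviceCreateInfo.pNext = &perfFeatures;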
// wrapper struct for struct VkPhysicalDevicePerformanceQueryPropertiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePerformanceQueryPropertiesKHR.html
struct PhysicalDevicePerformanceQueryPropertiesKHR
{
using NativeType = VkPhysicalDevicePerformanceQueryPropertiesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryPropertiesKHR(VULKAN_HPP_NAMESPACE::Bool32 allowCommandBufferQueryCopies_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, allowCommandBufferQueryCopies{ allowCommandBufferQueryCopies_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryPropertiesKHR( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDevicePerformanceQueryPropertiesKHR( VkPhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDevicePerformanceQueryPropertiesKHR( *reinterpret_cast<PhysicalDevicePerformanceQueryPropertiesKHR const *>( &rhs ) )
{}
PhysicalDevicePerformanceQueryPropertiesKHR & operator=( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDevicePerformanceQueryPropertiesKHR & operator=( VkPhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR const *>( &rhs );
return *this;
}
operator VkPhysicalDevicePerformanceQueryPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDevicePerformanceQueryPropertiesKHR*>( this );
}
operator VkPhysicalDevicePerformanceQueryPropertiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDevicePerformanceQueryPropertiesKHR*>( this );
}
operator VkPhysicalDevicePerformanceQueryPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDevicePerformanceQueryPropertiesKHR*>( this );
}
operator VkPhysicalDevicePerformanceQueryPropertiesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDevicePerformanceQueryPropertiesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, allowCommandBufferQueryCopies );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDevicePerformanceQueryPropertiesKHR const & ) const = default;
#else
bool operator==( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( allowCommandBufferQueryCopies == rhs.allowCommandBufferQueryCopies );
#endif
}
bool operator!=( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 allowCommandBufferQueryCopies = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR>
{
using Type = PhysicalDevicePerformanceQueryPropertiesKHR;
};
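// Usage sketch (illustrative only): when allowCommandBufferQueryCopies is VK_FALSE,
// performance query results cannot be copied with vkCmdCopyQueryPoolResults and must be
// fetched on the host via vkGetQueryPoolResults ('chain' assumed as above):
//   auto const & perfProps = chain.get<vk::PhysicalDevicePerformanceQueryPropertiesKHR>();
//   bool mustReadOnHost = perfProps.allowCommandBufferQueryCopies == VK_FALSE;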
// wrapper struct for struct VkPhysicalDevicePipelineBinaryFeaturesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePipelineBinaryFeaturesKHR.html
struct PhysicalDevicePipelineBinaryFeaturesKHR
{
using NativeType = VkPhysicalDevicePipelineBinaryFeaturesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineBinaryFeaturesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineBinaryFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 pipelineBinaries_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pipelineBinaries{ pipelineBinaries_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineBinaryFeaturesKHR( PhysicalDevicePipelineBinaryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDevicePipelineBinaryFeaturesKHR( VkPhysicalDevicePipelineBinaryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDevicePipelineBinaryFeaturesKHR( *reinterpret_cast<PhysicalDevicePipelineBinaryFeaturesKHR const *>( &rhs ) )
{}
PhysicalDevicePipelineBinaryFeaturesKHR & operator=( PhysicalDevicePipelineBinaryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDevicePipelineBinaryFeaturesKHR & operator=( VkPhysicalDevicePipelineBinaryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineBinaryFeaturesKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineBinaryFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineBinaryFeaturesKHR & setPipelineBinaries( VULKAN_HPP_NAMESPACE::Bool32 pipelineBinaries_ ) VULKAN_HPP_NOEXCEPT
{
pipelineBinaries = pipelineBinaries_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDevicePipelineBinaryFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDevicePipelineBinaryFeaturesKHR*>( this );
}
operator VkPhysicalDevicePipelineBinaryFeaturesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDevicePipelineBinaryFeaturesKHR*>( this );
}
operator VkPhysicalDevicePipelineBinaryFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDevicePipelineBinaryFeaturesKHR*>( this );
}
operator VkPhysicalDevicePipelineBinaryFeaturesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDevicePipelineBinaryFeaturesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pipelineBinaries );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDevicePipelineBinaryFeaturesKHR const & ) const = default;
#else
bool operator==( PhysicalDevicePipelineBinaryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pipelineBinaries == rhs.pipelineBinaries );
#endif
}
bool operator!=( PhysicalDevicePipelineBinaryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineBinaryFeaturesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 pipelineBinaries = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDevicePipelineBinaryFeaturesKHR>
{
using Type = PhysicalDevicePipelineBinaryFeaturesKHR;
};
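// Usage sketch (illustrative only): the single feature bit gates the whole
// VK_KHR_pipeline_binary API:
//   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
//                                            vk::PhysicalDevicePipelineBinaryFeaturesKHR>();
//   bool hasPipelineBinaries =
//     chain.get<vk::PhysicalDevicePipelineBinaryFeaturesKHR>().pipelineBinaries == VK_TRUE;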
// wrapper struct for struct VkPhysicalDevicePipelineBinaryPropertiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePipelineBinaryPropertiesKHR.html
struct PhysicalDevicePipelineBinaryPropertiesKHR
{
using NativeType = VkPhysicalDevicePipelineBinaryPropertiesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineBinaryPropertiesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineBinaryPropertiesKHR(VULKAN_HPP_NAMESPACE::Bool32 pipelineBinaryInternalCache_ = {}, VULKAN_HPP_NAMESPACE::Bool32 pipelineBinaryInternalCacheControl_ = {}, VULKAN_HPP_NAMESPACE::Bool32 pipelineBinaryPrefersInternalCache_ = {}, VULKAN_HPP_NAMESPACE::Bool32 pipelineBinaryPrecompiledInternalCache_ = {}, VULKAN_HPP_NAMESPACE::Bool32 pipelineBinaryCompressedData_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pipelineBinaryInternalCache{ pipelineBinaryInternalCache_ }, pipelineBinaryInternalCacheControl{ pipelineBinaryInternalCacheControl_ }, pipelineBinaryPrefersInternalCache{ pipelineBinaryPrefersInternalCache_ }, pipelineBinaryPrecompiledInternalCache{ pipelineBinaryPrecompiledInternalCache_ }, pipelineBinaryCompressedData{ pipelineBinaryCompressedData_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineBinaryPropertiesKHR( PhysicalDevicePipelineBinaryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDevicePipelineBinaryPropertiesKHR( VkPhysicalDevicePipelineBinaryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDevicePipelineBinaryPropertiesKHR( *reinterpret_cast<PhysicalDevicePipelineBinaryPropertiesKHR const *>( &rhs ) )
{}
PhysicalDevicePipelineBinaryPropertiesKHR & operator=( PhysicalDevicePipelineBinaryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDevicePipelineBinaryPropertiesKHR & operator=( VkPhysicalDevicePipelineBinaryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineBinaryPropertiesKHR const *>( &rhs );
return *this;
}
operator VkPhysicalDevicePipelineBinaryPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDevicePipelineBinaryPropertiesKHR*>( this );
}
operator VkPhysicalDevicePipelineBinaryPropertiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDevicePipelineBinaryPropertiesKHR*>( this );
}
operator VkPhysicalDevicePipelineBinaryPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDevicePipelineBinaryPropertiesKHR*>( this );
}
operator VkPhysicalDevicePipelineBinaryPropertiesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDevicePipelineBinaryPropertiesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pipelineBinaryInternalCache, pipelineBinaryInternalCacheControl, pipelineBinaryPrefersInternalCache, pipelineBinaryPrecompiledInternalCache, pipelineBinaryCompressedData );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDevicePipelineBinaryPropertiesKHR const & ) const = default;
#else
bool operator==( PhysicalDevicePipelineBinaryPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pipelineBinaryInternalCache == rhs.pipelineBinaryInternalCache )
&& ( pipelineBinaryInternalCacheControl == rhs.pipelineBinaryInternalCacheControl )
&& ( pipelineBinaryPrefersInternalCache == rhs.pipelineBinaryPrefersInternalCache )
&& ( pipelineBinaryPrecompiledInternalCache == rhs.pipelineBinaryPrecompiledInternalCache )
&& ( pipelineBinaryCompressedData == rhs.pipelineBinaryCompressedData );
#endif
}
bool operator!=( PhysicalDevicePipelineBinaryPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineBinaryPropertiesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 pipelineBinaryInternalCache = {};
VULKAN_HPP_NAMESPACE::Bool32 pipelineBinaryInternalCacheControl = {};
VULKAN_HPP_NAMESPACE::Bool32 pipelineBinaryPrefersInternalCache = {};
VULKAN_HPP_NAMESPACE::Bool32 pipelineBinaryPrecompiledInternalCache = {};
VULKAN_HPP_NAMESPACE::Bool32 pipelineBinaryCompressedData = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDevicePipelineBinaryPropertiesKHR>
{
using Type = PhysicalDevicePipelineBinaryPropertiesKHR;
};
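// Properties structs like the one above are filled in by the implementation, which is
// why no setters are generated for them. A sketch of reading the values back, under
// the same assumptions as the feature-query example above:
//
//   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
//                                              vk::PhysicalDevicePipelineBinaryPropertiesKHR>();
//   auto const & binaryProps = chain.get<vk::PhysicalDevicePipelineBinaryPropertiesKHR>();
//   bool prefersInternalCache = binaryProps.pipelineBinaryPrefersInternalCache == VK_TRUE;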
// wrapper struct for struct VkPhysicalDevicePipelineCreationCacheControlFeatures, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePipelineCreationCacheControlFeatures.html
struct PhysicalDevicePipelineCreationCacheControlFeatures
{
using NativeType = VkPhysicalDevicePipelineCreationCacheControlFeatures;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineCreationCacheControlFeatures;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineCreationCacheControlFeatures(VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pipelineCreationCacheControl{ pipelineCreationCacheControl_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineCreationCacheControlFeatures( PhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDevicePipelineCreationCacheControlFeatures( VkPhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDevicePipelineCreationCacheControlFeatures( *reinterpret_cast<PhysicalDevicePipelineCreationCacheControlFeatures const *>( &rhs ) )
{}
PhysicalDevicePipelineCreationCacheControlFeatures & operator=( PhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDevicePipelineCreationCacheControlFeatures & operator=( VkPhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineCreationCacheControlFeatures const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineCreationCacheControlFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineCreationCacheControlFeatures & setPipelineCreationCacheControl( VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ ) VULKAN_HPP_NOEXCEPT
{
pipelineCreationCacheControl = pipelineCreationCacheControl_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDevicePipelineCreationCacheControlFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDevicePipelineCreationCacheControlFeatures*>( this );
}
operator VkPhysicalDevicePipelineCreationCacheControlFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDevicePipelineCreationCacheControlFeatures*>( this );
}
operator VkPhysicalDevicePipelineCreationCacheControlFeatures const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDevicePipelineCreationCacheControlFeatures*>( this );
}
operator VkPhysicalDevicePipelineCreationCacheControlFeatures *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDevicePipelineCreationCacheControlFeatures*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pipelineCreationCacheControl );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDevicePipelineCreationCacheControlFeatures const & ) const = default;
#else
bool operator==( PhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pipelineCreationCacheControl == rhs.pipelineCreationCacheControl );
#endif
}
bool operator!=( PhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineCreationCacheControlFeatures;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDevicePipelineCreationCacheControlFeatures>
{
using Type = PhysicalDevicePipelineCreationCacheControlFeatures;
};
using PhysicalDevicePipelineCreationCacheControlFeaturesEXT = PhysicalDevicePipelineCreationCacheControlFeatures;
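// Feature structs are also how features get enabled: a sketch of switching on
// pipelineCreationCacheControl at device creation. `queueCreateInfo` is a hypothetical
// pre-filled vk::DeviceQueueCreateInfo; enhanced mode with exceptions is assumed and
// error handling is elided.
//
//   vk::PhysicalDevicePipelineCreationCacheControlFeatures cacheControlFeatures{ VK_TRUE };
//   vk::DeviceCreateInfo deviceCreateInfo{};
//   deviceCreateInfo.setQueueCreateInfos( queueCreateInfo ).setPNext( &cacheControlFeatures );
//   vk::Device device = physicalDevice.createDevice( deviceCreateInfo );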
// wrapper struct for struct VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR.html
struct PhysicalDevicePipelineExecutablePropertiesFeaturesKHR
{
using NativeType = VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineExecutablePropertiesFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pipelineExecutableInfo{ pipelineExecutableInfo_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( *reinterpret_cast<PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const *>( &rhs ) )
{}
PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & operator=( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & operator=( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & setPipelineExecutableInfo( VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo_ ) VULKAN_HPP_NOEXCEPT
{
pipelineExecutableInfo = pipelineExecutableInfo_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR*>( this );
}
operator VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR*>( this );
}
operator VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR*>( this );
}
operator VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pipelineExecutableInfo );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & ) const = default;
#else
bool operator==( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pipelineExecutableInfo == rhs.pipelineExecutableInfo );
#endif
}
bool operator!=( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR>
{
using Type = PhysicalDevicePipelineExecutablePropertiesFeaturesKHR;
};
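// The generated setters return *this, so feature structs can be configured in a fluent
// chain; a sketch (per VK_KHR_pipeline_executable_properties, enabling this feature is
// what permits the vkGetPipelineExecutablePropertiesKHR family of queries):
//
//   auto executableInfoFeatures = vk::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR{}
//                                   .setPipelineExecutableInfo( VK_TRUE );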
// wrapper struct for struct VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT.html
struct PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT
{
using NativeType = VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 pipelineLibraryGroupHandles_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pipelineLibraryGroupHandles{ pipelineLibraryGroupHandles_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT( PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT( VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT( *reinterpret_cast<PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT const *>( &rhs ) )
{}
PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT & operator=( PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT & operator=( VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT & setPipelineLibraryGroupHandles( VULKAN_HPP_NAMESPACE::Bool32 pipelineLibraryGroupHandles_ ) VULKAN_HPP_NOEXCEPT
{
pipelineLibraryGroupHandles = pipelineLibraryGroupHandles_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT*>( this );
}
operator VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT*>( this );
}
operator VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT*>( this );
}
operator VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pipelineLibraryGroupHandles );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pipelineLibraryGroupHandles == rhs.pipelineLibraryGroupHandles );
#endif
}
bool operator!=( PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 pipelineLibraryGroupHandles = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT>
{
using Type = PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT;
};
// wrapper struct for struct VkPhysicalDevicePipelineOpacityMicromapFeaturesARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePipelineOpacityMicromapFeaturesARM.html
struct PhysicalDevicePipelineOpacityMicromapFeaturesARM
{
using NativeType = VkPhysicalDevicePipelineOpacityMicromapFeaturesARM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineOpacityMicromapFeaturesARM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineOpacityMicromapFeaturesARM(VULKAN_HPP_NAMESPACE::Bool32 pipelineOpacityMicromap_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pipelineOpacityMicromap{ pipelineOpacityMicromap_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineOpacityMicromapFeaturesARM( PhysicalDevicePipelineOpacityMicromapFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDevicePipelineOpacityMicromapFeaturesARM( VkPhysicalDevicePipelineOpacityMicromapFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDevicePipelineOpacityMicromapFeaturesARM( *reinterpret_cast<PhysicalDevicePipelineOpacityMicromapFeaturesARM const *>( &rhs ) )
{}
PhysicalDevicePipelineOpacityMicromapFeaturesARM & operator=( PhysicalDevicePipelineOpacityMicromapFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDevicePipelineOpacityMicromapFeaturesARM & operator=( VkPhysicalDevicePipelineOpacityMicromapFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineOpacityMicromapFeaturesARM const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineOpacityMicromapFeaturesARM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineOpacityMicromapFeaturesARM & setPipelineOpacityMicromap( VULKAN_HPP_NAMESPACE::Bool32 pipelineOpacityMicromap_ ) VULKAN_HPP_NOEXCEPT
{
pipelineOpacityMicromap = pipelineOpacityMicromap_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDevicePipelineOpacityMicromapFeaturesARM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDevicePipelineOpacityMicromapFeaturesARM*>( this );
}
operator VkPhysicalDevicePipelineOpacityMicromapFeaturesARM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDevicePipelineOpacityMicromapFeaturesARM*>( this );
}
operator VkPhysicalDevicePipelineOpacityMicromapFeaturesARM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDevicePipelineOpacityMicromapFeaturesARM*>( this );
}
operator VkPhysicalDevicePipelineOpacityMicromapFeaturesARM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDevicePipelineOpacityMicromapFeaturesARM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pipelineOpacityMicromap );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDevicePipelineOpacityMicromapFeaturesARM const & ) const = default;
#else
bool operator==( PhysicalDevicePipelineOpacityMicromapFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pipelineOpacityMicromap == rhs.pipelineOpacityMicromap );
#endif
}
bool operator!=( PhysicalDevicePipelineOpacityMicromapFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineOpacityMicromapFeaturesARM;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 pipelineOpacityMicromap = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDevicePipelineOpacityMicromapFeaturesARM>
{
using Type = PhysicalDevicePipelineOpacityMicromapFeaturesARM;
};
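// The conversion operators generated for every wrapper struct make it layout-compatible
// and interchangeable with its C counterpart; a sketch of handing a wrapper straight to
// code that expects the native type:
//
//   vk::PhysicalDevicePipelineOpacityMicromapFeaturesARM features{};
//   VkPhysicalDevicePipelineOpacityMicromapFeaturesARM * native = features;  // operator VkX *()
//   native->pipelineOpacityMicromap = VK_TRUE;  // writes through to the wrapper's storage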
// wrapper struct for struct VkPhysicalDevicePipelinePropertiesFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePipelinePropertiesFeaturesEXT.html
struct PhysicalDevicePipelinePropertiesFeaturesEXT
{
using NativeType = VkPhysicalDevicePipelinePropertiesFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelinePropertiesFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDevicePipelinePropertiesFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 pipelinePropertiesIdentifier_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pipelinePropertiesIdentifier{ pipelinePropertiesIdentifier_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDevicePipelinePropertiesFeaturesEXT( PhysicalDevicePipelinePropertiesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDevicePipelinePropertiesFeaturesEXT( VkPhysicalDevicePipelinePropertiesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDevicePipelinePropertiesFeaturesEXT( *reinterpret_cast<PhysicalDevicePipelinePropertiesFeaturesEXT const *>( &rhs ) )
{}
PhysicalDevicePipelinePropertiesFeaturesEXT & operator=( PhysicalDevicePipelinePropertiesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDevicePipelinePropertiesFeaturesEXT & operator=( VkPhysicalDevicePipelinePropertiesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelinePropertiesFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelinePropertiesFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelinePropertiesFeaturesEXT & setPipelinePropertiesIdentifier( VULKAN_HPP_NAMESPACE::Bool32 pipelinePropertiesIdentifier_ ) VULKAN_HPP_NOEXCEPT
{
pipelinePropertiesIdentifier = pipelinePropertiesIdentifier_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDevicePipelinePropertiesFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDevicePipelinePropertiesFeaturesEXT*>( this );
}
operator VkPhysicalDevicePipelinePropertiesFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDevicePipelinePropertiesFeaturesEXT*>( this );
}
operator VkPhysicalDevicePipelinePropertiesFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDevicePipelinePropertiesFeaturesEXT*>( this );
}
operator VkPhysicalDevicePipelinePropertiesFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDevicePipelinePropertiesFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pipelinePropertiesIdentifier );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDevicePipelinePropertiesFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDevicePipelinePropertiesFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pipelinePropertiesIdentifier == rhs.pipelinePropertiesIdentifier );
#endif
}
bool operator!=( PhysicalDevicePipelinePropertiesFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelinePropertiesFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 pipelinePropertiesIdentifier = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDevicePipelinePropertiesFeaturesEXT>
{
using Type = PhysicalDevicePipelinePropertiesFeaturesEXT;
};
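// The CppType specializations above map a StructureType enumerant back to its wrapper
// struct at compile time, so generic code can recover the C++ type from the enum value;
// an illustrative sketch (assuming <type_traits> is available):
//
//   static_assert( std::is_same<vk::CppType<vk::StructureType,
//                                           vk::StructureType::ePhysicalDevicePipelinePropertiesFeaturesEXT>::Type,
//                               vk::PhysicalDevicePipelinePropertiesFeaturesEXT>::value, "" );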
// wrapper struct for struct VkPhysicalDevicePipelineProtectedAccessFeatures, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePipelineProtectedAccessFeatures.html
struct PhysicalDevicePipelineProtectedAccessFeatures
{
using NativeType = VkPhysicalDevicePipelineProtectedAccessFeatures;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineProtectedAccessFeatures;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineProtectedAccessFeatures(VULKAN_HPP_NAMESPACE::Bool32 pipelineProtectedAccess_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pipelineProtectedAccess{ pipelineProtectedAccess_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineProtectedAccessFeatures( PhysicalDevicePipelineProtectedAccessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDevicePipelineProtectedAccessFeatures( VkPhysicalDevicePipelineProtectedAccessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDevicePipelineProtectedAccessFeatures( *reinterpret_cast<PhysicalDevicePipelineProtectedAccessFeatures const *>( &rhs ) )
{}
PhysicalDevicePipelineProtectedAccessFeatures & operator=( PhysicalDevicePipelineProtectedAccessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDevicePipelineProtectedAccessFeatures & operator=( VkPhysicalDevicePipelineProtectedAccessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineProtectedAccessFeatures const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineProtectedAccessFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineProtectedAccessFeatures & setPipelineProtectedAccess( VULKAN_HPP_NAMESPACE::Bool32 pipelineProtectedAccess_ ) VULKAN_HPP_NOEXCEPT
{
pipelineProtectedAccess = pipelineProtectedAccess_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDevicePipelineProtectedAccessFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDevicePipelineProtectedAccessFeatures*>( this );
}
operator VkPhysicalDevicePipelineProtectedAccessFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDevicePipelineProtectedAccessFeatures*>( this );
}
operator VkPhysicalDevicePipelineProtectedAccessFeatures const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDevicePipelineProtectedAccessFeatures*>( this );
}
operator VkPhysicalDevicePipelineProtectedAccessFeatures *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDevicePipelineProtectedAccessFeatures*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pipelineProtectedAccess );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDevicePipelineProtectedAccessFeatures const & ) const = default;
#else
bool operator==( PhysicalDevicePipelineProtectedAccessFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pipelineProtectedAccess == rhs.pipelineProtectedAccess );
#endif
}
bool operator!=( PhysicalDevicePipelineProtectedAccessFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineProtectedAccessFeatures;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 pipelineProtectedAccess = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDevicePipelineProtectedAccessFeatures>
{
using Type = PhysicalDevicePipelineProtectedAccessFeatures;
};
using PhysicalDevicePipelineProtectedAccessFeaturesEXT = PhysicalDevicePipelineProtectedAccessFeatures;
// wrapper struct for struct VkPhysicalDevicePipelineRobustnessFeatures, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePipelineRobustnessFeatures.html
struct PhysicalDevicePipelineRobustnessFeatures
{
using NativeType = VkPhysicalDevicePipelineRobustnessFeatures;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineRobustnessFeatures;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineRobustnessFeatures(VULKAN_HPP_NAMESPACE::Bool32 pipelineRobustness_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pipelineRobustness{ pipelineRobustness_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineRobustnessFeatures( PhysicalDevicePipelineRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDevicePipelineRobustnessFeatures( VkPhysicalDevicePipelineRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDevicePipelineRobustnessFeatures( *reinterpret_cast<PhysicalDevicePipelineRobustnessFeatures const *>( &rhs ) )
{}
PhysicalDevicePipelineRobustnessFeatures & operator=( PhysicalDevicePipelineRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDevicePipelineRobustnessFeatures & operator=( VkPhysicalDevicePipelineRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessFeatures const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineRobustnessFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineRobustnessFeatures & setPipelineRobustness( VULKAN_HPP_NAMESPACE::Bool32 pipelineRobustness_ ) VULKAN_HPP_NOEXCEPT
{
pipelineRobustness = pipelineRobustness_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDevicePipelineRobustnessFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDevicePipelineRobustnessFeatures*>( this );
}
operator VkPhysicalDevicePipelineRobustnessFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDevicePipelineRobustnessFeatures*>( this );
}
operator VkPhysicalDevicePipelineRobustnessFeatures const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDevicePipelineRobustnessFeatures*>( this );
}
operator VkPhysicalDevicePipelineRobustnessFeatures *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDevicePipelineRobustnessFeatures*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pipelineRobustness );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDevicePipelineRobustnessFeatures const & ) const = default;
#else
bool operator==( PhysicalDevicePipelineRobustnessFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pipelineRobustness == rhs.pipelineRobustness );
#endif
}
bool operator!=( PhysicalDevicePipelineRobustnessFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineRobustnessFeatures;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 pipelineRobustness = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDevicePipelineRobustnessFeatures>
{
using Type = PhysicalDevicePipelineRobustnessFeatures;
};
using PhysicalDevicePipelineRobustnessFeaturesEXT = PhysicalDevicePipelineRobustnessFeatures;
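// Aliases like the one above exist because the extension was promoted to core: the EXT
// spelling and the core spelling name the same type, so pre-promotion code keeps
// compiling unchanged, e.g.:
//
//   static_assert( std::is_same<vk::PhysicalDevicePipelineRobustnessFeaturesEXT,
//                               vk::PhysicalDevicePipelineRobustnessFeatures>::value, "" );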
// wrapper struct for struct VkPhysicalDevicePipelineRobustnessProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePipelineRobustnessProperties.html
struct PhysicalDevicePipelineRobustnessProperties
{
using NativeType = VkPhysicalDevicePipelineRobustnessProperties;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineRobustnessProperties;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineRobustnessProperties(VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior defaultRobustnessStorageBuffers_ = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault, VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior defaultRobustnessUniformBuffers_ = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault, VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior defaultRobustnessVertexInputs_ = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault, VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehavior defaultRobustnessImages_ = VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehavior::eDeviceDefault, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, defaultRobustnessStorageBuffers{ defaultRobustnessStorageBuffers_ }, defaultRobustnessUniformBuffers{ defaultRobustnessUniformBuffers_ }, defaultRobustnessVertexInputs{ defaultRobustnessVertexInputs_ }, defaultRobustnessImages{ defaultRobustnessImages_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineRobustnessProperties( PhysicalDevicePipelineRobustnessProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDevicePipelineRobustnessProperties( VkPhysicalDevicePipelineRobustnessProperties const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDevicePipelineRobustnessProperties( *reinterpret_cast<PhysicalDevicePipelineRobustnessProperties const *>( &rhs ) )
{}
PhysicalDevicePipelineRobustnessProperties & operator=( PhysicalDevicePipelineRobustnessProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDevicePipelineRobustnessProperties & operator=( VkPhysicalDevicePipelineRobustnessProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessProperties const *>( &rhs );
return *this;
}
operator VkPhysicalDevicePipelineRobustnessProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDevicePipelineRobustnessProperties*>( this );
}
operator VkPhysicalDevicePipelineRobustnessProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDevicePipelineRobustnessProperties*>( this );
}
operator VkPhysicalDevicePipelineRobustnessProperties const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDevicePipelineRobustnessProperties*>( this );
}
operator VkPhysicalDevicePipelineRobustnessProperties *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDevicePipelineRobustnessProperties*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior const &, VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior const &, VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior const &, VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehavior const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, defaultRobustnessStorageBuffers, defaultRobustnessUniformBuffers, defaultRobustnessVertexInputs, defaultRobustnessImages );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDevicePipelineRobustnessProperties const & ) const = default;
#else
bool operator==( PhysicalDevicePipelineRobustnessProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( defaultRobustnessStorageBuffers == rhs.defaultRobustnessStorageBuffers )
&& ( defaultRobustnessUniformBuffers == rhs.defaultRobustnessUniformBuffers )
&& ( defaultRobustnessVertexInputs == rhs.defaultRobustnessVertexInputs )
&& ( defaultRobustnessImages == rhs.defaultRobustnessImages );
#endif
}
bool operator!=( PhysicalDevicePipelineRobustnessProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineRobustnessProperties;
void * pNext = {};
VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior defaultRobustnessStorageBuffers = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault;
VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior defaultRobustnessUniformBuffers = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault;
VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior defaultRobustnessVertexInputs = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault;
VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehavior defaultRobustnessImages = VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehavior::eDeviceDefault;
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDevicePipelineRobustnessProperties>
{
using Type = PhysicalDevicePipelineRobustnessProperties;
};
using PhysicalDevicePipelineRobustnessPropertiesEXT = PhysicalDevicePipelineRobustnessProperties;
// wrapper struct for struct VkPhysicalDevicePointClippingProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePointClippingProperties.html
struct PhysicalDevicePointClippingProperties
{
using NativeType = VkPhysicalDevicePointClippingProperties;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePointClippingProperties;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDevicePointClippingProperties(VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior_ = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pointClippingBehavior{ pointClippingBehavior_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDevicePointClippingProperties( PhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDevicePointClippingProperties( VkPhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDevicePointClippingProperties( *reinterpret_cast<PhysicalDevicePointClippingProperties const *>( &rhs ) )
{}
PhysicalDevicePointClippingProperties & operator=( PhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDevicePointClippingProperties & operator=( VkPhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingProperties const *>( &rhs );
return *this;
}
operator VkPhysicalDevicePointClippingProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDevicePointClippingProperties*>( this );
}
operator VkPhysicalDevicePointClippingProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDevicePointClippingProperties*>( this );
}
operator VkPhysicalDevicePointClippingProperties const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDevicePointClippingProperties*>( this );
}
operator VkPhysicalDevicePointClippingProperties *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDevicePointClippingProperties*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PointClippingBehavior const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pointClippingBehavior );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDevicePointClippingProperties const & ) const = default;
#else
bool operator==( PhysicalDevicePointClippingProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pointClippingBehavior == rhs.pointClippingBehavior );
#endif
}
bool operator!=( PhysicalDevicePointClippingProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePointClippingProperties;
void * pNext = {};
VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes;
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDevicePointClippingProperties>
{
using Type = PhysicalDevicePointClippingProperties;
};
using PhysicalDevicePointClippingPropertiesKHR = PhysicalDevicePointClippingProperties;
#if defined( VK_ENABLE_BETA_EXTENSIONS )
// wrapper struct for struct VkPhysicalDevicePortabilitySubsetFeaturesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePortabilitySubsetFeaturesKHR.html
struct PhysicalDevicePortabilitySubsetFeaturesKHR
{
using NativeType = VkPhysicalDevicePortabilitySubsetFeaturesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 constantAlphaColorBlendFactors_ = {}, VULKAN_HPP_NAMESPACE::Bool32 events_ = {}, VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatReinterpretation_ = {}, VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatSwizzle_ = {}, VULKAN_HPP_NAMESPACE::Bool32 imageView2DOn3DImage_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multisampleArrayImage_ = {}, VULKAN_HPP_NAMESPACE::Bool32 mutableComparisonSamplers_ = {}, VULKAN_HPP_NAMESPACE::Bool32 pointPolygons_ = {}, VULKAN_HPP_NAMESPACE::Bool32 samplerMipLodBias_ = {}, VULKAN_HPP_NAMESPACE::Bool32 separateStencilMaskRef_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSampleRateInterpolationFunctions_ = {}, VULKAN_HPP_NAMESPACE::Bool32 tessellationIsolines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 tessellationPointMode_ = {}, VULKAN_HPP_NAMESPACE::Bool32 triangleFans_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeAccessBeyondStride_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, constantAlphaColorBlendFactors{ constantAlphaColorBlendFactors_ }, events{ events_ }, imageViewFormatReinterpretation{ imageViewFormatReinterpretation_ }, imageViewFormatSwizzle{ imageViewFormatSwizzle_ }, imageView2DOn3DImage{ imageView2DOn3DImage_ }, multisampleArrayImage{ multisampleArrayImage_ }, mutableComparisonSamplers{ mutableComparisonSamplers_ }, pointPolygons{ pointPolygons_ }, samplerMipLodBias{ samplerMipLodBias_ }, separateStencilMaskRef{ separateStencilMaskRef_ }, shaderSampleRateInterpolationFunctions{ shaderSampleRateInterpolationFunctions_ }, tessellationIsolines{ tessellationIsolines_ }, tessellationPointMode{ tessellationPointMode_ }, triangleFans{ triangleFans_ }, vertexAttributeAccessBeyondStride{ vertexAttributeAccessBeyondStride_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetFeaturesKHR( PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDevicePortabilitySubsetFeaturesKHR( VkPhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDevicePortabilitySubsetFeaturesKHR( *reinterpret_cast<PhysicalDevicePortabilitySubsetFeaturesKHR const *>( &rhs ) )
{}
PhysicalDevicePortabilitySubsetFeaturesKHR & operator=( PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDevicePortabilitySubsetFeaturesKHR & operator=( VkPhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetFeaturesKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setConstantAlphaColorBlendFactors( VULKAN_HPP_NAMESPACE::Bool32 constantAlphaColorBlendFactors_ ) VULKAN_HPP_NOEXCEPT
{
constantAlphaColorBlendFactors = constantAlphaColorBlendFactors_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setEvents( VULKAN_HPP_NAMESPACE::Bool32 events_ ) VULKAN_HPP_NOEXCEPT
{
events = events_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setImageViewFormatReinterpretation( VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatReinterpretation_ ) VULKAN_HPP_NOEXCEPT
{
imageViewFormatReinterpretation = imageViewFormatReinterpretation_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setImageViewFormatSwizzle( VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatSwizzle_ ) VULKAN_HPP_NOEXCEPT
{
imageViewFormatSwizzle = imageViewFormatSwizzle_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setImageView2DOn3DImage( VULKAN_HPP_NAMESPACE::Bool32 imageView2DOn3DImage_ ) VULKAN_HPP_NOEXCEPT
{
imageView2DOn3DImage = imageView2DOn3DImage_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setMultisampleArrayImage( VULKAN_HPP_NAMESPACE::Bool32 multisampleArrayImage_ ) VULKAN_HPP_NOEXCEPT
{
multisampleArrayImage = multisampleArrayImage_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setMutableComparisonSamplers( VULKAN_HPP_NAMESPACE::Bool32 mutableComparisonSamplers_ ) VULKAN_HPP_NOEXCEPT
{
mutableComparisonSamplers = mutableComparisonSamplers_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setPointPolygons( VULKAN_HPP_NAMESPACE::Bool32 pointPolygons_ ) VULKAN_HPP_NOEXCEPT
{
pointPolygons = pointPolygons_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setSamplerMipLodBias( VULKAN_HPP_NAMESPACE::Bool32 samplerMipLodBias_ ) VULKAN_HPP_NOEXCEPT
{
samplerMipLodBias = samplerMipLodBias_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setSeparateStencilMaskRef( VULKAN_HPP_NAMESPACE::Bool32 separateStencilMaskRef_ ) VULKAN_HPP_NOEXCEPT
{
separateStencilMaskRef = separateStencilMaskRef_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setShaderSampleRateInterpolationFunctions( VULKAN_HPP_NAMESPACE::Bool32 shaderSampleRateInterpolationFunctions_ ) VULKAN_HPP_NOEXCEPT
{
shaderSampleRateInterpolationFunctions = shaderSampleRateInterpolationFunctions_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setTessellationIsolines( VULKAN_HPP_NAMESPACE::Bool32 tessellationIsolines_ ) VULKAN_HPP_NOEXCEPT
{
tessellationIsolines = tessellationIsolines_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setTessellationPointMode( VULKAN_HPP_NAMESPACE::Bool32 tessellationPointMode_ ) VULKAN_HPP_NOEXCEPT
{
tessellationPointMode = tessellationPointMode_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setTriangleFans( VULKAN_HPP_NAMESPACE::Bool32 triangleFans_ ) VULKAN_HPP_NOEXCEPT
{
triangleFans = triangleFans_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setVertexAttributeAccessBeyondStride( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeAccessBeyondStride_ ) VULKAN_HPP_NOEXCEPT
{
vertexAttributeAccessBeyondStride = vertexAttributeAccessBeyondStride_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDevicePortabilitySubsetFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDevicePortabilitySubsetFeaturesKHR*>( this );
}
operator VkPhysicalDevicePortabilitySubsetFeaturesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDevicePortabilitySubsetFeaturesKHR*>( this );
}
operator VkPhysicalDevicePortabilitySubsetFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDevicePortabilitySubsetFeaturesKHR*>( this );
}
operator VkPhysicalDevicePortabilitySubsetFeaturesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDevicePortabilitySubsetFeaturesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, constantAlphaColorBlendFactors, events, imageViewFormatReinterpretation, imageViewFormatSwizzle, imageView2DOn3DImage, multisampleArrayImage, mutableComparisonSamplers, pointPolygons, samplerMipLodBias, separateStencilMaskRef, shaderSampleRateInterpolationFunctions, tessellationIsolines, tessellationPointMode, triangleFans, vertexAttributeAccessBeyondStride );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDevicePortabilitySubsetFeaturesKHR const & ) const = default;
#else
bool operator==( PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( constantAlphaColorBlendFactors == rhs.constantAlphaColorBlendFactors )
&& ( events == rhs.events )
&& ( imageViewFormatReinterpretation == rhs.imageViewFormatReinterpretation )
&& ( imageViewFormatSwizzle == rhs.imageViewFormatSwizzle )
&& ( imageView2DOn3DImage == rhs.imageView2DOn3DImage )
&& ( multisampleArrayImage == rhs.multisampleArrayImage )
&& ( mutableComparisonSamplers == rhs.mutableComparisonSamplers )
&& ( pointPolygons == rhs.pointPolygons )
&& ( samplerMipLodBias == rhs.samplerMipLodBias )
&& ( separateStencilMaskRef == rhs.separateStencilMaskRef )
&& ( shaderSampleRateInterpolationFunctions == rhs.shaderSampleRateInterpolationFunctions )
&& ( tessellationIsolines == rhs.tessellationIsolines )
&& ( tessellationPointMode == rhs.tessellationPointMode )
&& ( triangleFans == rhs.triangleFans )
&& ( vertexAttributeAccessBeyondStride == rhs.vertexAttributeAccessBeyondStride );
#endif
}
bool operator!=( PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 constantAlphaColorBlendFactors = {};
VULKAN_HPP_NAMESPACE::Bool32 events = {};
VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatReinterpretation = {};
VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatSwizzle = {};
VULKAN_HPP_NAMESPACE::Bool32 imageView2DOn3DImage = {};
VULKAN_HPP_NAMESPACE::Bool32 multisampleArrayImage = {};
VULKAN_HPP_NAMESPACE::Bool32 mutableComparisonSamplers = {};
VULKAN_HPP_NAMESPACE::Bool32 pointPolygons = {};
VULKAN_HPP_NAMESPACE::Bool32 samplerMipLodBias = {};
VULKAN_HPP_NAMESPACE::Bool32 separateStencilMaskRef = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderSampleRateInterpolationFunctions = {};
VULKAN_HPP_NAMESPACE::Bool32 tessellationIsolines = {};
VULKAN_HPP_NAMESPACE::Bool32 tessellationPointMode = {};
VULKAN_HPP_NAMESPACE::Bool32 triangleFans = {};
VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeAccessBeyondStride = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR>
{
using Type = PhysicalDevicePortabilitySubsetFeaturesKHR;
};
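// A sketch of probing the portability subset on a portability-conformant device (only
// meaningful when VK_KHR_portability_subset is present; same query setup as the earlier
// feature examples):
//
//   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
//                                            vk::PhysicalDevicePortabilitySubsetFeaturesKHR>();
//   if ( chain.get<vk::PhysicalDevicePortabilitySubsetFeaturesKHR>().triangleFans == VK_FALSE )
//   {
//     // avoid vk::PrimitiveTopology::eTriangleFan on this device
//   }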
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
// wrapper struct for struct VkPhysicalDevicePortabilitySubsetPropertiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePortabilitySubsetPropertiesKHR.html
struct PhysicalDevicePortabilitySubsetPropertiesKHR
{
using NativeType = VkPhysicalDevicePortabilitySubsetPropertiesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetPropertiesKHR(uint32_t minVertexInputBindingStrideAlignment_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, minVertexInputBindingStrideAlignment{ minVertexInputBindingStrideAlignment_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetPropertiesKHR( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDevicePortabilitySubsetPropertiesKHR( VkPhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDevicePortabilitySubsetPropertiesKHR( *reinterpret_cast<PhysicalDevicePortabilitySubsetPropertiesKHR const *>( &rhs ) )
{}
PhysicalDevicePortabilitySubsetPropertiesKHR & operator=( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDevicePortabilitySubsetPropertiesKHR & operator=( VkPhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetPropertiesKHR const *>( &rhs );
return *this;
}
operator VkPhysicalDevicePortabilitySubsetPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDevicePortabilitySubsetPropertiesKHR*>( this );
}
operator VkPhysicalDevicePortabilitySubsetPropertiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDevicePortabilitySubsetPropertiesKHR*>( this );
}
operator VkPhysicalDevicePortabilitySubsetPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDevicePortabilitySubsetPropertiesKHR*>( this );
}
operator VkPhysicalDevicePortabilitySubsetPropertiesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDevicePortabilitySubsetPropertiesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, minVertexInputBindingStrideAlignment );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDevicePortabilitySubsetPropertiesKHR const & ) const = default;
#else
bool operator==( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( minVertexInputBindingStrideAlignment == rhs.minVertexInputBindingStrideAlignment );
#endif
}
bool operator!=( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR;
void * pNext = {};
uint32_t minVertexInputBindingStrideAlignment = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR>
{
using Type = PhysicalDevicePortabilitySubsetPropertiesKHR;
};
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
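// Illustrative usage (editorial sketch, not generated code): properties structs are
// queried the same way, through a pNext chain on vk::PhysicalDeviceProperties2.
// Vertex binding strides must then be a multiple of
// minVertexInputBindingStrideAlignment; the rounding below assumes the alignment is a
// power of two, and `physicalDevice` and `rawStride` are assumed to exist.
//
//   vk::PhysicalDevicePortabilitySubsetPropertiesKHR portabilityProps;
//   vk::PhysicalDeviceProperties2 props2;
//   props2.pNext = &portabilityProps;
//   physicalDevice.getProperties2( &props2 );
//   uint32_t align  = portabilityProps.minVertexInputBindingStrideAlignment;
//   uint32_t stride = ( rawStride + align - 1 ) & ~( align - 1 );  // round up to the alignment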
// wrapper struct for struct VkPhysicalDevicePresentBarrierFeaturesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePresentBarrierFeaturesNV.html
struct PhysicalDevicePresentBarrierFeaturesNV
{
using NativeType = VkPhysicalDevicePresentBarrierFeaturesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePresentBarrierFeaturesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDevicePresentBarrierFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 presentBarrier_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, presentBarrier{ presentBarrier_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDevicePresentBarrierFeaturesNV( PhysicalDevicePresentBarrierFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDevicePresentBarrierFeaturesNV( VkPhysicalDevicePresentBarrierFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDevicePresentBarrierFeaturesNV( *reinterpret_cast<PhysicalDevicePresentBarrierFeaturesNV const *>( &rhs ) )
{}
PhysicalDevicePresentBarrierFeaturesNV & operator=( PhysicalDevicePresentBarrierFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDevicePresentBarrierFeaturesNV & operator=( VkPhysicalDevicePresentBarrierFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentBarrierFeaturesNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentBarrierFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentBarrierFeaturesNV & setPresentBarrier( VULKAN_HPP_NAMESPACE::Bool32 presentBarrier_ ) VULKAN_HPP_NOEXCEPT
{
presentBarrier = presentBarrier_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDevicePresentBarrierFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDevicePresentBarrierFeaturesNV*>( this );
}
operator VkPhysicalDevicePresentBarrierFeaturesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDevicePresentBarrierFeaturesNV*>( this );
}
operator VkPhysicalDevicePresentBarrierFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDevicePresentBarrierFeaturesNV*>( this );
}
operator VkPhysicalDevicePresentBarrierFeaturesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDevicePresentBarrierFeaturesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, presentBarrier );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDevicePresentBarrierFeaturesNV const & ) const = default;
#else
bool operator==( PhysicalDevicePresentBarrierFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( presentBarrier == rhs.presentBarrier );
#endif
}
bool operator!=( PhysicalDevicePresentBarrierFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePresentBarrierFeaturesNV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 presentBarrier = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDevicePresentBarrierFeaturesNV>
{
using Type = PhysicalDevicePresentBarrierFeaturesNV;
};
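// Illustrative usage (editorial sketch, not generated code): to turn a queried feature
// on, chain the struct into vk::DeviceCreateInfo via pNext with the desired members set
// to VK_TRUE; the matching device extension (VK_NV_present_barrier here) must also be
// listed in ppEnabledExtensionNames.
//
//   vk::PhysicalDevicePresentBarrierFeaturesNV presentBarrierFeatures{ VK_TRUE };
//   vk::DeviceCreateInfo deviceCreateInfo;
//   deviceCreateInfo.pNext = &presentBarrierFeatures;  // request the feature on the new device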
// wrapper struct for struct VkPhysicalDevicePresentId2FeaturesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePresentId2FeaturesKHR.html
struct PhysicalDevicePresentId2FeaturesKHR
{
using NativeType = VkPhysicalDevicePresentId2FeaturesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePresentId2FeaturesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDevicePresentId2FeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 presentId2_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, presentId2{ presentId2_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDevicePresentId2FeaturesKHR( PhysicalDevicePresentId2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDevicePresentId2FeaturesKHR( VkPhysicalDevicePresentId2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDevicePresentId2FeaturesKHR( *reinterpret_cast<PhysicalDevicePresentId2FeaturesKHR const *>( &rhs ) )
{}
PhysicalDevicePresentId2FeaturesKHR & operator=( PhysicalDevicePresentId2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDevicePresentId2FeaturesKHR & operator=( VkPhysicalDevicePresentId2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentId2FeaturesKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentId2FeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentId2FeaturesKHR & setPresentId2( VULKAN_HPP_NAMESPACE::Bool32 presentId2_ ) VULKAN_HPP_NOEXCEPT
{
presentId2 = presentId2_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDevicePresentId2FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDevicePresentId2FeaturesKHR*>( this );
}
operator VkPhysicalDevicePresentId2FeaturesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDevicePresentId2FeaturesKHR*>( this );
}
operator VkPhysicalDevicePresentId2FeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDevicePresentId2FeaturesKHR*>( this );
}
operator VkPhysicalDevicePresentId2FeaturesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDevicePresentId2FeaturesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, presentId2 );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDevicePresentId2FeaturesKHR const & ) const = default;
#else
bool operator==( PhysicalDevicePresentId2FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( presentId2 == rhs.presentId2 );
#endif
}
bool operator!=( PhysicalDevicePresentId2FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePresentId2FeaturesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 presentId2 = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDevicePresentId2FeaturesKHR>
{
using Type = PhysicalDevicePresentId2FeaturesKHR;
};
// wrapper struct for struct VkPhysicalDevicePresentIdFeaturesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePresentIdFeaturesKHR.html
struct PhysicalDevicePresentIdFeaturesKHR
{
using NativeType = VkPhysicalDevicePresentIdFeaturesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePresentIdFeaturesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDevicePresentIdFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 presentId_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, presentId{ presentId_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDevicePresentIdFeaturesKHR( PhysicalDevicePresentIdFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDevicePresentIdFeaturesKHR( VkPhysicalDevicePresentIdFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDevicePresentIdFeaturesKHR( *reinterpret_cast<PhysicalDevicePresentIdFeaturesKHR const *>( &rhs ) )
{}
PhysicalDevicePresentIdFeaturesKHR & operator=( PhysicalDevicePresentIdFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDevicePresentIdFeaturesKHR & operator=( VkPhysicalDevicePresentIdFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentIdFeaturesKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentIdFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentIdFeaturesKHR & setPresentId( VULKAN_HPP_NAMESPACE::Bool32 presentId_ ) VULKAN_HPP_NOEXCEPT
{
presentId = presentId_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDevicePresentIdFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDevicePresentIdFeaturesKHR*>( this );
}
operator VkPhysicalDevicePresentIdFeaturesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDevicePresentIdFeaturesKHR*>( this );
}
operator VkPhysicalDevicePresentIdFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDevicePresentIdFeaturesKHR*>( this );
}
operator VkPhysicalDevicePresentIdFeaturesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDevicePresentIdFeaturesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, presentId );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDevicePresentIdFeaturesKHR const & ) const = default;
#else
bool operator==( PhysicalDevicePresentIdFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( presentId == rhs.presentId );
#endif
}
bool operator!=( PhysicalDevicePresentIdFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePresentIdFeaturesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 presentId = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDevicePresentIdFeaturesKHR>
{
using Type = PhysicalDevicePresentIdFeaturesKHR;
};
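// Illustrative usage (editorial sketch, not generated code): instead of wiring pNext by
// hand, vulkan.hpp can assemble the chain through a StructureChain; a hedged sketch of
// the templated getFeatures2 overload, assuming a valid `physicalDevice`:
//
//   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
//                                            vk::PhysicalDevicePresentIdFeaturesKHR>();
//   bool presentIdSupported =
//     chain.get<vk::PhysicalDevicePresentIdFeaturesKHR>().presentId == VK_TRUE;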
#if defined( VK_ENABLE_BETA_EXTENSIONS )
// wrapper struct for struct VkPhysicalDevicePresentMeteringFeaturesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePresentMeteringFeaturesNV.html
struct PhysicalDevicePresentMeteringFeaturesNV
{
using NativeType = VkPhysicalDevicePresentMeteringFeaturesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePresentMeteringFeaturesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDevicePresentMeteringFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 presentMetering_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, presentMetering{ presentMetering_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDevicePresentMeteringFeaturesNV( PhysicalDevicePresentMeteringFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDevicePresentMeteringFeaturesNV( VkPhysicalDevicePresentMeteringFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDevicePresentMeteringFeaturesNV( *reinterpret_cast<PhysicalDevicePresentMeteringFeaturesNV const *>( &rhs ) )
{}
PhysicalDevicePresentMeteringFeaturesNV & operator=( PhysicalDevicePresentMeteringFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDevicePresentMeteringFeaturesNV & operator=( VkPhysicalDevicePresentMeteringFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentMeteringFeaturesNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentMeteringFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentMeteringFeaturesNV & setPresentMetering( VULKAN_HPP_NAMESPACE::Bool32 presentMetering_ ) VULKAN_HPP_NOEXCEPT
{
presentMetering = presentMetering_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDevicePresentMeteringFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDevicePresentMeteringFeaturesNV*>( this );
}
operator VkPhysicalDevicePresentMeteringFeaturesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDevicePresentMeteringFeaturesNV*>( this );
}
operator VkPhysicalDevicePresentMeteringFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDevicePresentMeteringFeaturesNV*>( this );
}
operator VkPhysicalDevicePresentMeteringFeaturesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDevicePresentMeteringFeaturesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, presentMetering );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDevicePresentMeteringFeaturesNV const & ) const = default;
#else
bool operator==( PhysicalDevicePresentMeteringFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( presentMetering == rhs.presentMetering );
#endif
}
bool operator!=( PhysicalDevicePresentMeteringFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePresentMeteringFeaturesNV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 presentMetering = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDevicePresentMeteringFeaturesNV>
{
using Type = PhysicalDevicePresentMeteringFeaturesNV;
};
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
// wrapper struct for struct VkPhysicalDevicePresentModeFifoLatestReadyFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePresentModeFifoLatestReadyFeaturesEXT.html
struct PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT
{
using NativeType = VkPhysicalDevicePresentModeFifoLatestReadyFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePresentModeFifoLatestReadyFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 presentModeFifoLatestReady_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, presentModeFifoLatestReady{ presentModeFifoLatestReady_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT( PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT( VkPhysicalDevicePresentModeFifoLatestReadyFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT( *reinterpret_cast<PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT const *>( &rhs ) )
{}
PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT & operator=( PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT & operator=( VkPhysicalDevicePresentModeFifoLatestReadyFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT & setPresentModeFifoLatestReady( VULKAN_HPP_NAMESPACE::Bool32 presentModeFifoLatestReady_ ) VULKAN_HPP_NOEXCEPT
{
presentModeFifoLatestReady = presentModeFifoLatestReady_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDevicePresentModeFifoLatestReadyFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDevicePresentModeFifoLatestReadyFeaturesEXT*>( this );
}
operator VkPhysicalDevicePresentModeFifoLatestReadyFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDevicePresentModeFifoLatestReadyFeaturesEXT*>( this );
}
operator VkPhysicalDevicePresentModeFifoLatestReadyFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDevicePresentModeFifoLatestReadyFeaturesEXT*>( this );
}
operator VkPhysicalDevicePresentModeFifoLatestReadyFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDevicePresentModeFifoLatestReadyFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, presentModeFifoLatestReady );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( presentModeFifoLatestReady == rhs.presentModeFifoLatestReady );
#endif
}
bool operator!=( PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePresentModeFifoLatestReadyFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 presentModeFifoLatestReady = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDevicePresentModeFifoLatestReadyFeaturesEXT>
{
using Type = PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT;
};
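// Illustrative usage (editorial sketch, not generated code): once
// presentModeFifoLatestReady is confirmed and the extension is enabled, a swapchain can
// request the matching present mode; the enumerant name below follows
// VK_EXT_present_mode_fifo_latest_ready.
//
//   vk::SwapchainCreateInfoKHR swapchainCreateInfo;
//   swapchainCreateInfo.presentMode = vk::PresentModeKHR::eFifoLatestReadyEXT;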
// wrapper struct for struct VkPhysicalDevicePresentWait2FeaturesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePresentWait2FeaturesKHR.html
struct PhysicalDevicePresentWait2FeaturesKHR
{
using NativeType = VkPhysicalDevicePresentWait2FeaturesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePresentWait2FeaturesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDevicePresentWait2FeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 presentWait2_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, presentWait2{ presentWait2_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDevicePresentWait2FeaturesKHR( PhysicalDevicePresentWait2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDevicePresentWait2FeaturesKHR( VkPhysicalDevicePresentWait2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDevicePresentWait2FeaturesKHR( *reinterpret_cast<PhysicalDevicePresentWait2FeaturesKHR const *>( &rhs ) )
{}
PhysicalDevicePresentWait2FeaturesKHR & operator=( PhysicalDevicePresentWait2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDevicePresentWait2FeaturesKHR & operator=( VkPhysicalDevicePresentWait2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentWait2FeaturesKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentWait2FeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentWait2FeaturesKHR & setPresentWait2( VULKAN_HPP_NAMESPACE::Bool32 presentWait2_ ) VULKAN_HPP_NOEXCEPT
{
presentWait2 = presentWait2_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDevicePresentWait2FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDevicePresentWait2FeaturesKHR*>( this );
}
operator VkPhysicalDevicePresentWait2FeaturesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDevicePresentWait2FeaturesKHR*>( this );
}
operator VkPhysicalDevicePresentWait2FeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDevicePresentWait2FeaturesKHR*>( this );
}
operator VkPhysicalDevicePresentWait2FeaturesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDevicePresentWait2FeaturesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, presentWait2 );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDevicePresentWait2FeaturesKHR const & ) const = default;
#else
bool operator==( PhysicalDevicePresentWait2FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( presentWait2 == rhs.presentWait2 );
#endif
}
bool operator!=( PhysicalDevicePresentWait2FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePresentWait2FeaturesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 presentWait2 = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDevicePresentWait2FeaturesKHR>
{
using Type = PhysicalDevicePresentWait2FeaturesKHR;
};
// wrapper struct for struct VkPhysicalDevicePresentWaitFeaturesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePresentWaitFeaturesKHR.html
struct PhysicalDevicePresentWaitFeaturesKHR
{
using NativeType = VkPhysicalDevicePresentWaitFeaturesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePresentWaitFeaturesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDevicePresentWaitFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 presentWait_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, presentWait{ presentWait_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDevicePresentWaitFeaturesKHR( PhysicalDevicePresentWaitFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDevicePresentWaitFeaturesKHR( VkPhysicalDevicePresentWaitFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDevicePresentWaitFeaturesKHR( *reinterpret_cast<PhysicalDevicePresentWaitFeaturesKHR const *>( &rhs ) )
{}
PhysicalDevicePresentWaitFeaturesKHR & operator=( PhysicalDevicePresentWaitFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDevicePresentWaitFeaturesKHR & operator=( VkPhysicalDevicePresentWaitFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentWaitFeaturesKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentWaitFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentWaitFeaturesKHR & setPresentWait( VULKAN_HPP_NAMESPACE::Bool32 presentWait_ ) VULKAN_HPP_NOEXCEPT
{
presentWait = presentWait_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDevicePresentWaitFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDevicePresentWaitFeaturesKHR*>( this );
}
operator VkPhysicalDevicePresentWaitFeaturesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDevicePresentWaitFeaturesKHR*>( this );
}
operator VkPhysicalDevicePresentWaitFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDevicePresentWaitFeaturesKHR*>( this );
}
operator VkPhysicalDevicePresentWaitFeaturesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDevicePresentWaitFeaturesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, presentWait );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDevicePresentWaitFeaturesKHR const & ) const = default;
#else
bool operator==( PhysicalDevicePresentWaitFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( presentWait == rhs.presentWait );
#endif
}
bool operator!=( PhysicalDevicePresentWaitFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePresentWaitFeaturesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 presentWait = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDevicePresentWaitFeaturesKHR>
{
using Type = PhysicalDevicePresentWaitFeaturesKHR;
};
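// Illustrative usage (editorial sketch, not generated code): the setters defined above
// allow a fluent style, used here to request presentWait together with presentId
// (VK_KHR_present_wait is specified on top of VK_KHR_present_id). The resulting chain
// would then hang off vk::DeviceCreateInfo::pNext as usual.
//
//   vk::PhysicalDevicePresentIdFeaturesKHR presentIdFeatures;
//   presentIdFeatures.setPresentId( VK_TRUE );
//   vk::PhysicalDevicePresentWaitFeaturesKHR presentWaitFeatures;
//   presentWaitFeatures.setPresentWait( VK_TRUE ).setPNext( &presentIdFeatures );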
// wrapper struct for struct VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT.html
struct PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT
{
using NativeType = VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePrimitiveTopologyListRestartFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyListRestart_ = {}, VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyPatchListRestart_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, primitiveTopologyListRestart{ primitiveTopologyListRestart_ }, primitiveTopologyPatchListRestart{ primitiveTopologyPatchListRestart_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT( PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT( VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT( *reinterpret_cast<PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const *>( &rhs ) )
{}
PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT & operator=( PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT & operator=( VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT & setPrimitiveTopologyListRestart( VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyListRestart_ ) VULKAN_HPP_NOEXCEPT
{
primitiveTopologyListRestart = primitiveTopologyListRestart_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT & setPrimitiveTopologyPatchListRestart( VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyPatchListRestart_ ) VULKAN_HPP_NOEXCEPT
{
primitiveTopologyPatchListRestart = primitiveTopologyPatchListRestart_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT*>( this );
}
operator VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT*>( this );
}
operator VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT*>( this );
}
operator VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, primitiveTopologyListRestart, primitiveTopologyPatchListRestart );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( primitiveTopologyListRestart == rhs.primitiveTopologyListRestart )
&& ( primitiveTopologyPatchListRestart == rhs.primitiveTopologyPatchListRestart );
#endif
}
bool operator!=( PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePrimitiveTopologyListRestartFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyListRestart = {};
VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyPatchListRestart = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDevicePrimitiveTopologyListRestartFeaturesEXT>
{
using Type = PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT;
};
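// Illustrative usage (editorial sketch, not generated code): with
// primitiveTopologyListRestart enabled, primitiveRestartEnable becomes legal for "list"
// topologies, which core Vulkan otherwise forbids:
//
//   vk::PipelineInputAssemblyStateCreateInfo inputAssembly{ {}, vk::PrimitiveTopology::eTriangleList, VK_TRUE };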
// wrapper struct for struct VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT.html
struct PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT
{
using NativeType = VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePrimitivesGeneratedQueryFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQuery_ = {}, VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQueryWithRasterizerDiscard_ = {}, VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQueryWithNonZeroStreams_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, primitivesGeneratedQuery{ primitivesGeneratedQuery_ }, primitivesGeneratedQueryWithRasterizerDiscard{ primitivesGeneratedQueryWithRasterizerDiscard_ }, primitivesGeneratedQueryWithNonZeroStreams{ primitivesGeneratedQueryWithNonZeroStreams_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT( PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT( VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT( *reinterpret_cast<PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const *>( &rhs ) )
{}
PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT & operator=( PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT & operator=( VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT & setPrimitivesGeneratedQuery( VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQuery_ ) VULKAN_HPP_NOEXCEPT
{
primitivesGeneratedQuery = primitivesGeneratedQuery_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT & setPrimitivesGeneratedQueryWithRasterizerDiscard( VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQueryWithRasterizerDiscard_ ) VULKAN_HPP_NOEXCEPT
{
primitivesGeneratedQueryWithRasterizerDiscard = primitivesGeneratedQueryWithRasterizerDiscard_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT & setPrimitivesGeneratedQueryWithNonZeroStreams( VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQueryWithNonZeroStreams_ ) VULKAN_HPP_NOEXCEPT
{
primitivesGeneratedQueryWithNonZeroStreams = primitivesGeneratedQueryWithNonZeroStreams_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT*>( this );
}
operator VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT*>( this );
}
operator VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT*>( this );
}
operator VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, primitivesGeneratedQuery, primitivesGeneratedQueryWithRasterizerDiscard, primitivesGeneratedQueryWithNonZeroStreams );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( primitivesGeneratedQuery == rhs.primitivesGeneratedQuery )
&& ( primitivesGeneratedQueryWithRasterizerDiscard == rhs.primitivesGeneratedQueryWithRasterizerDiscard )
&& ( primitivesGeneratedQueryWithNonZeroStreams == rhs.primitivesGeneratedQueryWithNonZeroStreams );
#endif
}
bool operator!=( PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePrimitivesGeneratedQueryFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQuery = {};
VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQueryWithRasterizerDiscard = {};
VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQueryWithNonZeroStreams = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDevicePrimitivesGeneratedQueryFeaturesEXT>
{
using Type = PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT;
};
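// Illustrative usage (editorial sketch, not generated code): with
// primitivesGeneratedQuery enabled, a query pool of the matching query type can be
// created; `device` is an assumed vk::Device and the enumerant name follows
// VK_EXT_primitives_generated_query.
//
//   vk::QueryPoolCreateInfo queryPoolCreateInfo{ {}, vk::QueryType::ePrimitivesGeneratedEXT, 1 };
//   vk::QueryPool queryPool = device.createQueryPool( queryPoolCreateInfo );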
// wrapper struct for struct VkPhysicalDevicePrivateDataFeatures, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePrivateDataFeatures.html
struct PhysicalDevicePrivateDataFeatures
{
using NativeType = VkPhysicalDevicePrivateDataFeatures;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePrivateDataFeatures;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDevicePrivateDataFeatures(VULKAN_HPP_NAMESPACE::Bool32 privateData_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, privateData{ privateData_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDevicePrivateDataFeatures( PhysicalDevicePrivateDataFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDevicePrivateDataFeatures( VkPhysicalDevicePrivateDataFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDevicePrivateDataFeatures( *reinterpret_cast<PhysicalDevicePrivateDataFeatures const *>( &rhs ) )
{}
PhysicalDevicePrivateDataFeatures & operator=( PhysicalDevicePrivateDataFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDevicePrivateDataFeatures & operator=( VkPhysicalDevicePrivateDataFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePrivateDataFeatures const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrivateDataFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrivateDataFeatures & setPrivateData( VULKAN_HPP_NAMESPACE::Bool32 privateData_ ) VULKAN_HPP_NOEXCEPT
{
privateData = privateData_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDevicePrivateDataFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDevicePrivateDataFeatures*>( this );
}
operator VkPhysicalDevicePrivateDataFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDevicePrivateDataFeatures*>( this );
}
operator VkPhysicalDevicePrivateDataFeatures const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDevicePrivateDataFeatures*>( this );
}
operator VkPhysicalDevicePrivateDataFeatures *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDevicePrivateDataFeatures*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, privateData );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDevicePrivateDataFeatures const & ) const = default;
#else
bool operator==( PhysicalDevicePrivateDataFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( privateData == rhs.privateData );
#endif
}
bool operator!=( PhysicalDevicePrivateDataFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePrivateDataFeatures;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 privateData = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDevicePrivateDataFeatures>
{
using Type = PhysicalDevicePrivateDataFeatures;
};
using PhysicalDevicePrivateDataFeaturesEXT = PhysicalDevicePrivateDataFeatures;
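// Illustrative usage (editorial sketch, not generated code) of what the privateData
// feature gates: attaching a 64-bit payload to a Vulkan object through a private data
// slot. `device` and `image` are assumed valid handles, and exact return-type details
// depend on the configured error handling.
//
//   vk::PrivateDataSlot slot = device.createPrivateDataSlot( vk::PrivateDataSlotCreateInfo{} );
//   device.setPrivateData( vk::ObjectType::eImage, uint64_t( VkImage( image ) ), slot, 42u );
//   uint64_t stored = device.getPrivateData( vk::ObjectType::eImage, uint64_t( VkImage( image ) ), slot );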
// wrapper struct for struct VkPhysicalDeviceProtectedMemoryFeatures, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceProtectedMemoryFeatures.html
struct PhysicalDeviceProtectedMemoryFeatures
{
using NativeType = VkPhysicalDeviceProtectedMemoryFeatures;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProtectedMemoryFeatures;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryFeatures(VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, protectedMemory{ protectedMemory_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryFeatures( PhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceProtectedMemoryFeatures( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceProtectedMemoryFeatures( *reinterpret_cast<PhysicalDeviceProtectedMemoryFeatures const *>( &rhs ) )
{}
PhysicalDeviceProtectedMemoryFeatures & operator=( PhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceProtectedMemoryFeatures & operator=( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProtectedMemoryFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProtectedMemoryFeatures & setProtectedMemory( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ ) VULKAN_HPP_NOEXCEPT
{
protectedMemory = protectedMemory_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceProtectedMemoryFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceProtectedMemoryFeatures*>( this );
}
operator VkPhysicalDeviceProtectedMemoryFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceProtectedMemoryFeatures*>( this );
}
operator VkPhysicalDeviceProtectedMemoryFeatures const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceProtectedMemoryFeatures*>( this );
}
operator VkPhysicalDeviceProtectedMemoryFeatures *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceProtectedMemoryFeatures*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, protectedMemory );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceProtectedMemoryFeatures const & ) const = default;
#else
bool operator==( PhysicalDeviceProtectedMemoryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( protectedMemory == rhs.protectedMemory );
#endif
}
bool operator!=( PhysicalDeviceProtectedMemoryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProtectedMemoryFeatures;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 protectedMemory = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceProtectedMemoryFeatures>
{
using Type = PhysicalDeviceProtectedMemoryFeatures;
};
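// Illustrative usage (editorial sketch, not generated code) of the comparison operators
// defined above: equality is memberwise and includes sType and pNext, so two otherwise
// identical structs with different pNext chains compare unequal.
//
//   vk::PhysicalDeviceProtectedMemoryFeatures a, b;
//   // ... fill a and b, e.g. from two different physical devices ...
//   bool sameSupport = ( a == b );  // also compares the pNext pointers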
// wrapper struct for struct VkPhysicalDeviceProtectedMemoryProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceProtectedMemoryProperties.html
struct PhysicalDeviceProtectedMemoryProperties
{
using NativeType = VkPhysicalDeviceProtectedMemoryProperties;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProtectedMemoryProperties;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryProperties(VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, protectedNoFault{ protectedNoFault_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryProperties( PhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceProtectedMemoryProperties( VkPhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceProtectedMemoryProperties( *reinterpret_cast<PhysicalDeviceProtectedMemoryProperties const *>( &rhs ) )
{}
PhysicalDeviceProtectedMemoryProperties & operator=( PhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceProtectedMemoryProperties & operator=( VkPhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceProtectedMemoryProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceProtectedMemoryProperties*>( this );
}
operator VkPhysicalDeviceProtectedMemoryProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceProtectedMemoryProperties*>( this );
}
operator VkPhysicalDeviceProtectedMemoryProperties const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceProtectedMemoryProperties*>( this );
}
operator VkPhysicalDeviceProtectedMemoryProperties *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceProtectedMemoryProperties*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, protectedNoFault );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceProtectedMemoryProperties const & ) const = default;
#else
bool operator==( PhysicalDeviceProtectedMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( protectedNoFault == rhs.protectedNoFault );
#endif
}
bool operator!=( PhysicalDeviceProtectedMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProtectedMemoryProperties;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceProtectedMemoryProperties>
{
using Type = PhysicalDeviceProtectedMemoryProperties;
};
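// Illustrative usage (editorial sketch, not generated code) of the conversion operators
// defined above, which let these wrappers pass directly to the C API at zero cost,
// since the wrapper and the C struct share the same layout:
//
//   vk::PhysicalDeviceProtectedMemoryProperties protectedProps;
//   VkPhysicalDeviceProtectedMemoryProperties const & cProps = protectedProps;  // via operator VkPhysicalDeviceProtectedMemoryProperties const &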
// wrapper struct for struct VkPhysicalDeviceProvokingVertexFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceProvokingVertexFeaturesEXT.html
struct PhysicalDeviceProvokingVertexFeaturesEXT
{
using NativeType = VkPhysicalDeviceProvokingVertexFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProvokingVertexFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 provokingVertexLast_ = {}, VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesProvokingVertex_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, provokingVertexLast{ provokingVertexLast_ }, transformFeedbackPreservesProvokingVertex{ transformFeedbackPreservesProvokingVertex_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexFeaturesEXT( PhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceProvokingVertexFeaturesEXT( VkPhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceProvokingVertexFeaturesEXT( *reinterpret_cast<PhysicalDeviceProvokingVertexFeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceProvokingVertexFeaturesEXT & operator=( PhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceProvokingVertexFeaturesEXT & operator=( VkPhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProvokingVertexFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProvokingVertexFeaturesEXT & setProvokingVertexLast( VULKAN_HPP_NAMESPACE::Bool32 provokingVertexLast_ ) VULKAN_HPP_NOEXCEPT
{
provokingVertexLast = provokingVertexLast_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProvokingVertexFeaturesEXT & setTransformFeedbackPreservesProvokingVertex( VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesProvokingVertex_ ) VULKAN_HPP_NOEXCEPT
{
transformFeedbackPreservesProvokingVertex = transformFeedbackPreservesProvokingVertex_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceProvokingVertexFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceProvokingVertexFeaturesEXT*>( this );
}
operator VkPhysicalDeviceProvokingVertexFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceProvokingVertexFeaturesEXT*>( this );
}
operator VkPhysicalDeviceProvokingVertexFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceProvokingVertexFeaturesEXT*>( this );
}
operator VkPhysicalDeviceProvokingVertexFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceProvokingVertexFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, provokingVertexLast, transformFeedbackPreservesProvokingVertex );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceProvokingVertexFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( provokingVertexLast == rhs.provokingVertexLast )
&& ( transformFeedbackPreservesProvokingVertex == rhs.transformFeedbackPreservesProvokingVertex );
#endif
}
bool operator!=( PhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProvokingVertexFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 provokingVertexLast = {};
VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesProvokingVertex = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceProvokingVertexFeaturesEXT>
{
using Type = PhysicalDeviceProvokingVertexFeaturesEXT;
};
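// Illustrative usage (editorial sketch, not generated code): once provokingVertexLast
// is confirmed, the per-pipeline mode is selected by chaining the corresponding state
// struct into the rasterization state; struct and enum names follow
// VK_EXT_provoking_vertex.
//
//   vk::PipelineRasterizationProvokingVertexStateCreateInfoEXT provokingVertexState{ vk::ProvokingVertexModeEXT::eLastVertex };
//   vk::PipelineRasterizationStateCreateInfo rasterizationState;
//   rasterizationState.pNext = &provokingVertexState;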
// wrapper struct for struct VkPhysicalDeviceProvokingVertexPropertiesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceProvokingVertexPropertiesEXT.html
struct PhysicalDeviceProvokingVertexPropertiesEXT
{
using NativeType = VkPhysicalDeviceProvokingVertexPropertiesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProvokingVertexPropertiesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexPropertiesEXT(VULKAN_HPP_NAMESPACE::Bool32 provokingVertexModePerPipeline_ = {}, VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesTriangleFanProvokingVertex_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, provokingVertexModePerPipeline{ provokingVertexModePerPipeline_ }, transformFeedbackPreservesTriangleFanProvokingVertex{ transformFeedbackPreservesTriangleFanProvokingVertex_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexPropertiesEXT( PhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceProvokingVertexPropertiesEXT( VkPhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceProvokingVertexPropertiesEXT( *reinterpret_cast<PhysicalDeviceProvokingVertexPropertiesEXT const *>( &rhs ) )
{}
PhysicalDeviceProvokingVertexPropertiesEXT & operator=( PhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceProvokingVertexPropertiesEXT & operator=( VkPhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexPropertiesEXT const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceProvokingVertexPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceProvokingVertexPropertiesEXT*>( this );
}
operator VkPhysicalDeviceProvokingVertexPropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceProvokingVertexPropertiesEXT*>( this );
}
operator VkPhysicalDeviceProvokingVertexPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceProvokingVertexPropertiesEXT*>( this );
}
operator VkPhysicalDeviceProvokingVertexPropertiesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceProvokingVertexPropertiesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, provokingVertexModePerPipeline, transformFeedbackPreservesTriangleFanProvokingVertex );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceProvokingVertexPropertiesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( provokingVertexModePerPipeline == rhs.provokingVertexModePerPipeline )
&& ( transformFeedbackPreservesTriangleFanProvokingVertex == rhs.transformFeedbackPreservesTriangleFanProvokingVertex );
#endif
}
bool operator!=( PhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProvokingVertexPropertiesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 provokingVertexModePerPipeline = {};
VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesTriangleFanProvokingVertex = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceProvokingVertexPropertiesEXT>
{
using Type = PhysicalDeviceProvokingVertexPropertiesEXT;
};
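// Note that this properties struct, like the other returned-only structs in this file,
// provides no setters: it is filled in by the implementation. Illustrative query sketch
// (comment only), assuming a valid vk::PhysicalDevice `physicalDevice` and Vulkan 1.1+:
//
//   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
//                                              vk::PhysicalDeviceProvokingVertexPropertiesEXT>();
//   vk::Bool32 perPipeline = chain.get<vk::PhysicalDeviceProvokingVertexPropertiesEXT>().provokingVertexModePerPipeline;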
// wrapper struct for struct VkPhysicalDevicePushDescriptorProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePushDescriptorProperties.html
struct PhysicalDevicePushDescriptorProperties
{
using NativeType = VkPhysicalDevicePushDescriptorProperties;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePushDescriptorProperties;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDevicePushDescriptorProperties(uint32_t maxPushDescriptors_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, maxPushDescriptors{ maxPushDescriptors_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDevicePushDescriptorProperties( PhysicalDevicePushDescriptorProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDevicePushDescriptorProperties( VkPhysicalDevicePushDescriptorProperties const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDevicePushDescriptorProperties( *reinterpret_cast<PhysicalDevicePushDescriptorProperties const *>( &rhs ) )
{}
PhysicalDevicePushDescriptorProperties & operator=( PhysicalDevicePushDescriptorProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDevicePushDescriptorProperties & operator=( VkPhysicalDevicePushDescriptorProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorProperties const *>( &rhs );
return *this;
}
operator VkPhysicalDevicePushDescriptorProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDevicePushDescriptorProperties*>( this );
}
operator VkPhysicalDevicePushDescriptorProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDevicePushDescriptorProperties*>( this );
}
operator VkPhysicalDevicePushDescriptorProperties const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDevicePushDescriptorProperties*>( this );
}
operator VkPhysicalDevicePushDescriptorProperties *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDevicePushDescriptorProperties*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, maxPushDescriptors );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDevicePushDescriptorProperties const & ) const = default;
#else
bool operator==( PhysicalDevicePushDescriptorProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( maxPushDescriptors == rhs.maxPushDescriptors );
#endif
}
bool operator!=( PhysicalDevicePushDescriptorProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePushDescriptorProperties;
void * pNext = {};
uint32_t maxPushDescriptors = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDevicePushDescriptorProperties>
{
using Type = PhysicalDevicePushDescriptorProperties;
};
using PhysicalDevicePushDescriptorPropertiesKHR = PhysicalDevicePushDescriptorProperties;
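// Illustrative sketch (comment only): the KHR alias above names the very same type, and the
// conversion operators defined by the struct let the wrapper be handed straight to the C API.
// `props` is a hypothetical local:
//
//   vk::PhysicalDevicePushDescriptorPropertiesKHR props;  // same type as PhysicalDevicePushDescriptorProperties
//   VkPhysicalDevicePushDescriptorProperties const & native = props;  // via operator VkPhysicalDevicePushDescriptorProperties const &
//   uint32_t limit = props.maxPushDescriptors;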
// wrapper struct for struct VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT.html
struct PhysicalDeviceRGBA10X6FormatsFeaturesEXT
{
using NativeType = VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRgba10X6FormatsFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceRGBA10X6FormatsFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 formatRgba10x6WithoutYCbCrSampler_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, formatRgba10x6WithoutYCbCrSampler{ formatRgba10x6WithoutYCbCrSampler_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceRGBA10X6FormatsFeaturesEXT( PhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceRGBA10X6FormatsFeaturesEXT( VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceRGBA10X6FormatsFeaturesEXT( *reinterpret_cast<PhysicalDeviceRGBA10X6FormatsFeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceRGBA10X6FormatsFeaturesEXT & operator=( PhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceRGBA10X6FormatsFeaturesEXT & operator=( VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRGBA10X6FormatsFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRGBA10X6FormatsFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRGBA10X6FormatsFeaturesEXT & setFormatRgba10x6WithoutYCbCrSampler( VULKAN_HPP_NAMESPACE::Bool32 formatRgba10x6WithoutYCbCrSampler_ ) VULKAN_HPP_NOEXCEPT
{
formatRgba10x6WithoutYCbCrSampler = formatRgba10x6WithoutYCbCrSampler_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT*>( this );
}
operator VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT*>( this );
}
operator VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT*>( this );
}
operator VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, formatRgba10x6WithoutYCbCrSampler );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceRGBA10X6FormatsFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( formatRgba10x6WithoutYCbCrSampler == rhs.formatRgba10x6WithoutYCbCrSampler );
#endif
}
bool operator!=( PhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRgba10X6FormatsFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 formatRgba10x6WithoutYCbCrSampler = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceRgba10X6FormatsFeaturesEXT>
{
using Type = PhysicalDeviceRGBA10X6FormatsFeaturesEXT;
};
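// Illustrative sketch (comment only): each setter returns *this, so a feature struct can be
// populated fluently; `features` is a hypothetical local:
//
//   auto features = vk::PhysicalDeviceRGBA10X6FormatsFeaturesEXT{}
//                     .setFormatRgba10x6WithoutYCbCrSampler( VK_TRUE );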
// wrapper struct for struct VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT.html
struct PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT
{
using NativeType = VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderColorAttachmentAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderDepthAttachmentAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderStencilAttachmentAccess_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, rasterizationOrderColorAttachmentAccess{ rasterizationOrderColorAttachmentAccess_ }, rasterizationOrderDepthAttachmentAccess{ rasterizationOrderDepthAttachmentAccess_ }, rasterizationOrderStencilAttachmentAccess{ rasterizationOrderStencilAttachmentAccess_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT( PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT( VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT( *reinterpret_cast<PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT & operator=( PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT & operator=( VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT & setRasterizationOrderColorAttachmentAccess( VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderColorAttachmentAccess_ ) VULKAN_HPP_NOEXCEPT
{
rasterizationOrderColorAttachmentAccess = rasterizationOrderColorAttachmentAccess_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT & setRasterizationOrderDepthAttachmentAccess( VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderDepthAttachmentAccess_ ) VULKAN_HPP_NOEXCEPT
{
rasterizationOrderDepthAttachmentAccess = rasterizationOrderDepthAttachmentAccess_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT & setRasterizationOrderStencilAttachmentAccess( VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderStencilAttachmentAccess_ ) VULKAN_HPP_NOEXCEPT
{
rasterizationOrderStencilAttachmentAccess = rasterizationOrderStencilAttachmentAccess_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT*>( this );
}
operator VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT*>( this );
}
operator VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT*>( this );
}
operator VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, rasterizationOrderColorAttachmentAccess, rasterizationOrderDepthAttachmentAccess, rasterizationOrderStencilAttachmentAccess );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( rasterizationOrderColorAttachmentAccess == rhs.rasterizationOrderColorAttachmentAccess )
&& ( rasterizationOrderDepthAttachmentAccess == rhs.rasterizationOrderDepthAttachmentAccess )
&& ( rasterizationOrderStencilAttachmentAccess == rhs.rasterizationOrderStencilAttachmentAccess );
#endif
}
bool operator!=( PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderColorAttachmentAccess = {};
VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderDepthAttachmentAccess = {};
VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderStencilAttachmentAccess = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT>
{
using Type = PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT;
};
using PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM = PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT;
// wrapper struct for struct VkPhysicalDeviceRawAccessChainsFeaturesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRawAccessChainsFeaturesNV.html
struct PhysicalDeviceRawAccessChainsFeaturesNV
{
using NativeType = VkPhysicalDeviceRawAccessChainsFeaturesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRawAccessChainsFeaturesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceRawAccessChainsFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 shaderRawAccessChains_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, shaderRawAccessChains{ shaderRawAccessChains_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceRawAccessChainsFeaturesNV( PhysicalDeviceRawAccessChainsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceRawAccessChainsFeaturesNV( VkPhysicalDeviceRawAccessChainsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceRawAccessChainsFeaturesNV( *reinterpret_cast<PhysicalDeviceRawAccessChainsFeaturesNV const *>( &rhs ) )
{}
PhysicalDeviceRawAccessChainsFeaturesNV & operator=( PhysicalDeviceRawAccessChainsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceRawAccessChainsFeaturesNV & operator=( VkPhysicalDeviceRawAccessChainsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRawAccessChainsFeaturesNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRawAccessChainsFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRawAccessChainsFeaturesNV & setShaderRawAccessChains( VULKAN_HPP_NAMESPACE::Bool32 shaderRawAccessChains_ ) VULKAN_HPP_NOEXCEPT
{
shaderRawAccessChains = shaderRawAccessChains_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceRawAccessChainsFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceRawAccessChainsFeaturesNV*>( this );
}
operator VkPhysicalDeviceRawAccessChainsFeaturesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceRawAccessChainsFeaturesNV*>( this );
}
operator VkPhysicalDeviceRawAccessChainsFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceRawAccessChainsFeaturesNV*>( this );
}
operator VkPhysicalDeviceRawAccessChainsFeaturesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceRawAccessChainsFeaturesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, shaderRawAccessChains );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceRawAccessChainsFeaturesNV const & ) const = default;
#else
bool operator==( PhysicalDeviceRawAccessChainsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( shaderRawAccessChains == rhs.shaderRawAccessChains );
#endif
}
bool operator!=( PhysicalDeviceRawAccessChainsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRawAccessChainsFeaturesNV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderRawAccessChains = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceRawAccessChainsFeaturesNV>
{
using Type = PhysicalDeviceRawAccessChainsFeaturesNV;
};
// wrapper struct for struct VkPhysicalDeviceRayQueryFeaturesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRayQueryFeaturesKHR.html
struct PhysicalDeviceRayQueryFeaturesKHR
{
using NativeType = VkPhysicalDeviceRayQueryFeaturesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayQueryFeaturesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceRayQueryFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 rayQuery_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, rayQuery{ rayQuery_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceRayQueryFeaturesKHR( PhysicalDeviceRayQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceRayQueryFeaturesKHR( VkPhysicalDeviceRayQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceRayQueryFeaturesKHR( *reinterpret_cast<PhysicalDeviceRayQueryFeaturesKHR const *>( &rhs ) )
{}
PhysicalDeviceRayQueryFeaturesKHR & operator=( PhysicalDeviceRayQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceRayQueryFeaturesKHR & operator=( VkPhysicalDeviceRayQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayQueryFeaturesKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayQueryFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayQueryFeaturesKHR & setRayQuery( VULKAN_HPP_NAMESPACE::Bool32 rayQuery_ ) VULKAN_HPP_NOEXCEPT
{
rayQuery = rayQuery_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceRayQueryFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceRayQueryFeaturesKHR*>( this );
}
operator VkPhysicalDeviceRayQueryFeaturesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceRayQueryFeaturesKHR*>( this );
}
operator VkPhysicalDeviceRayQueryFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceRayQueryFeaturesKHR*>( this );
}
operator VkPhysicalDeviceRayQueryFeaturesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceRayQueryFeaturesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, rayQuery );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceRayQueryFeaturesKHR const & ) const = default;
#else
bool operator==( PhysicalDeviceRayQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( rayQuery == rhs.rayQuery );
#endif
}
bool operator!=( PhysicalDeviceRayQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayQueryFeaturesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 rayQuery = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceRayQueryFeaturesKHR>
{
using Type = PhysicalDeviceRayQueryFeaturesKHR;
};
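// Illustrative sketch (comment only): enabling the feature at device creation by chaining the
// struct into DeviceCreateInfo::pNext. Assumes `physicalDevice` is a valid vk::PhysicalDevice;
// queue create infos and extension names are omitted for brevity:
//
//   vk::PhysicalDeviceRayQueryFeaturesKHR rayQueryFeatures( VK_TRUE );
//   vk::DeviceCreateInfo deviceCreateInfo;
//   deviceCreateInfo.pNext = &rayQueryFeatures;  // consumed by vkCreateDevice
//   vk::Device device = physicalDevice.createDevice( deviceCreateInfo );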
// wrapper struct for struct VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV.html
struct PhysicalDeviceRayTracingInvocationReorderFeaturesNV
{
using NativeType = VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingInvocationReorderFeaturesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingInvocationReorderFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 rayTracingInvocationReorder_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, rayTracingInvocationReorder{ rayTracingInvocationReorder_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingInvocationReorderFeaturesNV( PhysicalDeviceRayTracingInvocationReorderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceRayTracingInvocationReorderFeaturesNV( VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceRayTracingInvocationReorderFeaturesNV( *reinterpret_cast<PhysicalDeviceRayTracingInvocationReorderFeaturesNV const *>( &rhs ) )
{}
PhysicalDeviceRayTracingInvocationReorderFeaturesNV & operator=( PhysicalDeviceRayTracingInvocationReorderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceRayTracingInvocationReorderFeaturesNV & operator=( VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingInvocationReorderFeaturesNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingInvocationReorderFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingInvocationReorderFeaturesNV & setRayTracingInvocationReorder( VULKAN_HPP_NAMESPACE::Bool32 rayTracingInvocationReorder_ ) VULKAN_HPP_NOEXCEPT
{
rayTracingInvocationReorder = rayTracingInvocationReorder_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV*>( this );
}
operator VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV*>( this );
}
operator VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV*>( this );
}
operator VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, rayTracingInvocationReorder );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceRayTracingInvocationReorderFeaturesNV const & ) const = default;
#else
bool operator==( PhysicalDeviceRayTracingInvocationReorderFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( rayTracingInvocationReorder == rhs.rayTracingInvocationReorder );
#endif
}
bool operator!=( PhysicalDeviceRayTracingInvocationReorderFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingInvocationReorderFeaturesNV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 rayTracingInvocationReorder = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceRayTracingInvocationReorderFeaturesNV>
{
using Type = PhysicalDeviceRayTracingInvocationReorderFeaturesNV;
};
// wrapper struct for struct VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV.html
struct PhysicalDeviceRayTracingInvocationReorderPropertiesNV
{
using NativeType = VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingInvocationReorderPropertiesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingInvocationReorderPropertiesNV(VULKAN_HPP_NAMESPACE::RayTracingInvocationReorderModeNV rayTracingInvocationReorderReorderingHint_ = VULKAN_HPP_NAMESPACE::RayTracingInvocationReorderModeNV::eNone, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, rayTracingInvocationReorderReorderingHint{ rayTracingInvocationReorderReorderingHint_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingInvocationReorderPropertiesNV( PhysicalDeviceRayTracingInvocationReorderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceRayTracingInvocationReorderPropertiesNV( VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceRayTracingInvocationReorderPropertiesNV( *reinterpret_cast<PhysicalDeviceRayTracingInvocationReorderPropertiesNV const *>( &rhs ) )
{}
PhysicalDeviceRayTracingInvocationReorderPropertiesNV & operator=( PhysicalDeviceRayTracingInvocationReorderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceRayTracingInvocationReorderPropertiesNV & operator=( VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingInvocationReorderPropertiesNV const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV*>( this );
}
operator VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV*>( this );
}
operator VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV*>( this );
}
operator VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::RayTracingInvocationReorderModeNV const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, rayTracingInvocationReorderReorderingHint );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceRayTracingInvocationReorderPropertiesNV const & ) const = default;
#else
bool operator==( PhysicalDeviceRayTracingInvocationReorderPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( rayTracingInvocationReorderReorderingHint == rhs.rayTracingInvocationReorderReorderingHint );
#endif
}
bool operator!=( PhysicalDeviceRayTracingInvocationReorderPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingInvocationReorderPropertiesNV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::RayTracingInvocationReorderModeNV rayTracingInvocationReorderReorderingHint = VULKAN_HPP_NAMESPACE::RayTracingInvocationReorderModeNV::eNone;
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceRayTracingInvocationReorderPropertiesNV>
{
using Type = PhysicalDeviceRayTracingInvocationReorderPropertiesNV;
};
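// Illustrative sketch (comment only): this property carries an enum rather than a Bool32, so
// the queried hint is compared against the enum values. Assumes a valid vk::PhysicalDevice
// `physicalDevice` and Vulkan 1.1+:
//
//   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
//                                              vk::PhysicalDeviceRayTracingInvocationReorderPropertiesNV>();
//   auto hint  = chain.get<vk::PhysicalDeviceRayTracingInvocationReorderPropertiesNV>()
//                  .rayTracingInvocationReorderReorderingHint;
//   bool reorders = ( hint == vk::RayTracingInvocationReorderModeNV::eReorder );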
// wrapper struct for struct VkPhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV.html
struct PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV
{
using NativeType = VkPhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 spheres_ = {}, VULKAN_HPP_NAMESPACE::Bool32 linearSweptSpheres_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, spheres{ spheres_ }, linearSweptSpheres{ linearSweptSpheres_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV( PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV( VkPhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV( *reinterpret_cast<PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV const *>( &rhs ) )
{}
PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV & operator=( PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV & operator=( VkPhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV & setSpheres( VULKAN_HPP_NAMESPACE::Bool32 spheres_ ) VULKAN_HPP_NOEXCEPT
{
spheres = spheres_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV & setLinearSweptSpheres( VULKAN_HPP_NAMESPACE::Bool32 linearSweptSpheres_ ) VULKAN_HPP_NOEXCEPT
{
linearSweptSpheres = linearSweptSpheres_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV*>( this );
}
operator VkPhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV*>( this );
}
operator VkPhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV*>( this );
}
operator VkPhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, spheres, linearSweptSpheres );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV const & ) const = default;
#else
bool operator==( PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( spheres == rhs.spheres )
&& ( linearSweptSpheres == rhs.linearSweptSpheres );
#endif
}
bool operator!=( PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 spheres = {};
VULKAN_HPP_NAMESPACE::Bool32 linearSweptSpheres = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV>
{
using Type = PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV;
};
// wrapper struct for struct VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR.html
struct PhysicalDeviceRayTracingMaintenance1FeaturesKHR
{
using NativeType = VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingMaintenance1FeaturesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingMaintenance1FeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 rayTracingMaintenance1_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect2_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, rayTracingMaintenance1{ rayTracingMaintenance1_ }, rayTracingPipelineTraceRaysIndirect2{ rayTracingPipelineTraceRaysIndirect2_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingMaintenance1FeaturesKHR( PhysicalDeviceRayTracingMaintenance1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceRayTracingMaintenance1FeaturesKHR( VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceRayTracingMaintenance1FeaturesKHR( *reinterpret_cast<PhysicalDeviceRayTracingMaintenance1FeaturesKHR const *>( &rhs ) )
{}
PhysicalDeviceRayTracingMaintenance1FeaturesKHR & operator=( PhysicalDeviceRayTracingMaintenance1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceRayTracingMaintenance1FeaturesKHR & operator=( VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMaintenance1FeaturesKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingMaintenance1FeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingMaintenance1FeaturesKHR & setRayTracingMaintenance1( VULKAN_HPP_NAMESPACE::Bool32 rayTracingMaintenance1_ ) VULKAN_HPP_NOEXCEPT
{
rayTracingMaintenance1 = rayTracingMaintenance1_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingMaintenance1FeaturesKHR & setRayTracingPipelineTraceRaysIndirect2( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect2_ ) VULKAN_HPP_NOEXCEPT
{
rayTracingPipelineTraceRaysIndirect2 = rayTracingPipelineTraceRaysIndirect2_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR*>( this );
}
operator VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR*>( this );
}
operator VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR*>( this );
}
operator VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, rayTracingMaintenance1, rayTracingPipelineTraceRaysIndirect2 );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceRayTracingMaintenance1FeaturesKHR const & ) const = default;
#else
bool operator==( PhysicalDeviceRayTracingMaintenance1FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( rayTracingMaintenance1 == rhs.rayTracingMaintenance1 )
&& ( rayTracingPipelineTraceRaysIndirect2 == rhs.rayTracingPipelineTraceRaysIndirect2 );
#endif
}
bool operator!=( PhysicalDeviceRayTracingMaintenance1FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingMaintenance1FeaturesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 rayTracingMaintenance1 = {};
VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect2 = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceRayTracingMaintenance1FeaturesKHR>
{
using Type = PhysicalDeviceRayTracingMaintenance1FeaturesKHR;
};
// wrapper struct for struct VkPhysicalDeviceRayTracingMotionBlurFeaturesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRayTracingMotionBlurFeaturesNV.html
struct PhysicalDeviceRayTracingMotionBlurFeaturesNV
{
using NativeType = VkPhysicalDeviceRayTracingMotionBlurFeaturesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingMotionBlurFeaturesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingMotionBlurFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlur_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlurPipelineTraceRaysIndirect_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, rayTracingMotionBlur{ rayTracingMotionBlur_ }, rayTracingMotionBlurPipelineTraceRaysIndirect{ rayTracingMotionBlurPipelineTraceRaysIndirect_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingMotionBlurFeaturesNV( PhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceRayTracingMotionBlurFeaturesNV( VkPhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceRayTracingMotionBlurFeaturesNV( *reinterpret_cast<PhysicalDeviceRayTracingMotionBlurFeaturesNV const *>( &rhs ) )
{}
PhysicalDeviceRayTracingMotionBlurFeaturesNV & operator=( PhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceRayTracingMotionBlurFeaturesNV & operator=( VkPhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMotionBlurFeaturesNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingMotionBlurFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingMotionBlurFeaturesNV & setRayTracingMotionBlur( VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlur_ ) VULKAN_HPP_NOEXCEPT
{
rayTracingMotionBlur = rayTracingMotionBlur_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingMotionBlurFeaturesNV & setRayTracingMotionBlurPipelineTraceRaysIndirect( VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlurPipelineTraceRaysIndirect_ ) VULKAN_HPP_NOEXCEPT
{
rayTracingMotionBlurPipelineTraceRaysIndirect = rayTracingMotionBlurPipelineTraceRaysIndirect_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceRayTracingMotionBlurFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceRayTracingMotionBlurFeaturesNV*>( this );
}
operator VkPhysicalDeviceRayTracingMotionBlurFeaturesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceRayTracingMotionBlurFeaturesNV*>( this );
}
operator VkPhysicalDeviceRayTracingMotionBlurFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceRayTracingMotionBlurFeaturesNV*>( this );
}
operator VkPhysicalDeviceRayTracingMotionBlurFeaturesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceRayTracingMotionBlurFeaturesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, rayTracingMotionBlur, rayTracingMotionBlurPipelineTraceRaysIndirect );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceRayTracingMotionBlurFeaturesNV const & ) const = default;
#else
bool operator==( PhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( rayTracingMotionBlur == rhs.rayTracingMotionBlur )
&& ( rayTracingMotionBlurPipelineTraceRaysIndirect == rhs.rayTracingMotionBlurPipelineTraceRaysIndirect );
#endif
}
bool operator!=( PhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingMotionBlurFeaturesNV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlur = {};
VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlurPipelineTraceRaysIndirect = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceRayTracingMotionBlurFeaturesNV>
{
using Type = PhysicalDeviceRayTracingMotionBlurFeaturesNV;
};
// wrapper struct for struct VkPhysicalDeviceRayTracingPipelineFeaturesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRayTracingPipelineFeaturesKHR.html
struct PhysicalDeviceRayTracingPipelineFeaturesKHR
{
using NativeType = VkPhysicalDeviceRayTracingPipelineFeaturesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingPipelineFeaturesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPipelineFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipeline_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplay_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplayMixed_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rayTraversalPrimitiveCulling_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, rayTracingPipeline{ rayTracingPipeline_ }, rayTracingPipelineShaderGroupHandleCaptureReplay{ rayTracingPipelineShaderGroupHandleCaptureReplay_ }, rayTracingPipelineShaderGroupHandleCaptureReplayMixed{ rayTracingPipelineShaderGroupHandleCaptureReplayMixed_ }, rayTracingPipelineTraceRaysIndirect{ rayTracingPipelineTraceRaysIndirect_ }, rayTraversalPrimitiveCulling{ rayTraversalPrimitiveCulling_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPipelineFeaturesKHR( PhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceRayTracingPipelineFeaturesKHR( VkPhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceRayTracingPipelineFeaturesKHR( *reinterpret_cast<PhysicalDeviceRayTracingPipelineFeaturesKHR const *>( &rhs ) )
{}
PhysicalDeviceRayTracingPipelineFeaturesKHR & operator=( PhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceRayTracingPipelineFeaturesKHR & operator=( VkPhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelineFeaturesKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR & setRayTracingPipeline( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipeline_ ) VULKAN_HPP_NOEXCEPT
{
rayTracingPipeline = rayTracingPipeline_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR & setRayTracingPipelineShaderGroupHandleCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
{
rayTracingPipelineShaderGroupHandleCaptureReplay = rayTracingPipelineShaderGroupHandleCaptureReplay_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR & setRayTracingPipelineShaderGroupHandleCaptureReplayMixed( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplayMixed_ ) VULKAN_HPP_NOEXCEPT
{
rayTracingPipelineShaderGroupHandleCaptureReplayMixed = rayTracingPipelineShaderGroupHandleCaptureReplayMixed_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR & setRayTracingPipelineTraceRaysIndirect( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect_ ) VULKAN_HPP_NOEXCEPT
{
rayTracingPipelineTraceRaysIndirect = rayTracingPipelineTraceRaysIndirect_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR & setRayTraversalPrimitiveCulling( VULKAN_HPP_NAMESPACE::Bool32 rayTraversalPrimitiveCulling_ ) VULKAN_HPP_NOEXCEPT
{
rayTraversalPrimitiveCulling = rayTraversalPrimitiveCulling_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceRayTracingPipelineFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceRayTracingPipelineFeaturesKHR*>( this );
}
operator VkPhysicalDeviceRayTracingPipelineFeaturesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceRayTracingPipelineFeaturesKHR*>( this );
}
operator VkPhysicalDeviceRayTracingPipelineFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceRayTracingPipelineFeaturesKHR*>( this );
}
operator VkPhysicalDeviceRayTracingPipelineFeaturesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceRayTracingPipelineFeaturesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, rayTracingPipeline, rayTracingPipelineShaderGroupHandleCaptureReplay, rayTracingPipelineShaderGroupHandleCaptureReplayMixed, rayTracingPipelineTraceRaysIndirect, rayTraversalPrimitiveCulling );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceRayTracingPipelineFeaturesKHR const & ) const = default;
#else
bool operator==( PhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( rayTracingPipeline == rhs.rayTracingPipeline )
&& ( rayTracingPipelineShaderGroupHandleCaptureReplay == rhs.rayTracingPipelineShaderGroupHandleCaptureReplay )
&& ( rayTracingPipelineShaderGroupHandleCaptureReplayMixed == rhs.rayTracingPipelineShaderGroupHandleCaptureReplayMixed )
&& ( rayTracingPipelineTraceRaysIndirect == rhs.rayTracingPipelineTraceRaysIndirect )
&& ( rayTraversalPrimitiveCulling == rhs.rayTraversalPrimitiveCulling );
#endif
}
bool operator!=( PhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingPipelineFeaturesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipeline = {};
VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplay = {};
VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplayMixed = {};
VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect = {};
VULKAN_HPP_NAMESPACE::Bool32 rayTraversalPrimitiveCulling = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceRayTracingPipelineFeaturesKHR>
{
using Type = PhysicalDeviceRayTracingPipelineFeaturesKHR;
};
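// Note on comparison semantics (applies to the wrappers throughout this file): operator==
// (and the defaulted operator<=>, where available) compares every member, including the raw
// pNext pointer, so two structs describing identical features but chained at different
// addresses compare unequal. Illustrative query sketch (comment only), assuming a valid
// vk::PhysicalDevice `physicalDevice`:
//
//   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
//                                            vk::PhysicalDeviceRayTracingPipelineFeaturesKHR>();
//   auto const & rt = chain.get<vk::PhysicalDeviceRayTracingPipelineFeaturesKHR>();
//   bool canTraceRaysIndirect = rt.rayTracingPipeline && rt.rayTracingPipelineTraceRaysIndirect;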
// wrapper struct for struct VkPhysicalDeviceRayTracingPipelinePropertiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRayTracingPipelinePropertiesKHR.html
struct PhysicalDeviceRayTracingPipelinePropertiesKHR
{
using NativeType = VkPhysicalDeviceRayTracingPipelinePropertiesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingPipelinePropertiesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPipelinePropertiesKHR(uint32_t shaderGroupHandleSize_ = {}, uint32_t maxRayRecursionDepth_ = {}, uint32_t maxShaderGroupStride_ = {}, uint32_t shaderGroupBaseAlignment_ = {}, uint32_t shaderGroupHandleCaptureReplaySize_ = {}, uint32_t maxRayDispatchInvocationCount_ = {}, uint32_t shaderGroupHandleAlignment_ = {}, uint32_t maxRayHitAttributeSize_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, shaderGroupHandleSize{ shaderGroupHandleSize_ }, maxRayRecursionDepth{ maxRayRecursionDepth_ }, maxShaderGroupStride{ maxShaderGroupStride_ }, shaderGroupBaseAlignment{ shaderGroupBaseAlignment_ }, shaderGroupHandleCaptureReplaySize{ shaderGroupHandleCaptureReplaySize_ }, maxRayDispatchInvocationCount{ maxRayDispatchInvocationCount_ }, shaderGroupHandleAlignment{ shaderGroupHandleAlignment_ }, maxRayHitAttributeSize{ maxRayHitAttributeSize_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPipelinePropertiesKHR( PhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceRayTracingPipelinePropertiesKHR( VkPhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceRayTracingPipelinePropertiesKHR( *reinterpret_cast<PhysicalDeviceRayTracingPipelinePropertiesKHR const *>( &rhs ) )
{}
PhysicalDeviceRayTracingPipelinePropertiesKHR & operator=( PhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceRayTracingPipelinePropertiesKHR & operator=( VkPhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelinePropertiesKHR const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceRayTracingPipelinePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceRayTracingPipelinePropertiesKHR*>( this );
}
operator VkPhysicalDeviceRayTracingPipelinePropertiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceRayTracingPipelinePropertiesKHR*>( this );
}
operator VkPhysicalDeviceRayTracingPipelinePropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceRayTracingPipelinePropertiesKHR*>( this );
}
operator VkPhysicalDeviceRayTracingPipelinePropertiesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceRayTracingPipelinePropertiesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, shaderGroupHandleSize, maxRayRecursionDepth, maxShaderGroupStride, shaderGroupBaseAlignment, shaderGroupHandleCaptureReplaySize, maxRayDispatchInvocationCount, shaderGroupHandleAlignment, maxRayHitAttributeSize );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceRayTracingPipelinePropertiesKHR const & ) const = default;
#else
bool operator==( PhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( shaderGroupHandleSize == rhs.shaderGroupHandleSize )
&& ( maxRayRecursionDepth == rhs.maxRayRecursionDepth )
&& ( maxShaderGroupStride == rhs.maxShaderGroupStride )
&& ( shaderGroupBaseAlignment == rhs.shaderGroupBaseAlignment )
&& ( shaderGroupHandleCaptureReplaySize == rhs.shaderGroupHandleCaptureReplaySize )
&& ( maxRayDispatchInvocationCount == rhs.maxRayDispatchInvocationCount )
&& ( shaderGroupHandleAlignment == rhs.shaderGroupHandleAlignment )
&& ( maxRayHitAttributeSize == rhs.maxRayHitAttributeSize );
#endif
}
bool operator!=( PhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingPipelinePropertiesKHR;
void * pNext = {};
uint32_t shaderGroupHandleSize = {};
uint32_t maxRayRecursionDepth = {};
uint32_t maxShaderGroupStride = {};
uint32_t shaderGroupBaseAlignment = {};
uint32_t shaderGroupHandleCaptureReplaySize = {};
uint32_t maxRayDispatchInvocationCount = {};
uint32_t shaderGroupHandleAlignment = {};
uint32_t maxRayHitAttributeSize = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceRayTracingPipelinePropertiesKHR>
{
using Type = PhysicalDeviceRayTracingPipelinePropertiesKHR;
};
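  // Sizing sketch for a shader binding table (comment only; `rtProps` is assumed to be a
  // PhysicalDeviceRayTracingPipelinePropertiesKHR filled in via getProperties2): each SBT record
  // stride must be a multiple of shaderGroupHandleAlignment, and each table's base address a
  // multiple of shaderGroupBaseAlignment, so the handle size is typically rounded up first.
  //
  //   auto alignUp = []( uint32_t value, uint32_t alignment ) { return ( value + alignment - 1 ) & ~( alignment - 1 ); };
  //   uint32_t handleSizeAligned = alignUp( rtProps.shaderGroupHandleSize, rtProps.shaderGroupHandleAlignment );
  //   uint32_t raygenRegionSize  = alignUp( handleSizeAligned, rtProps.shaderGroupBaseAlignment );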
// wrapper struct for struct VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR.html
struct PhysicalDeviceRayTracingPositionFetchFeaturesKHR
{
using NativeType = VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingPositionFetchFeaturesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPositionFetchFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 rayTracingPositionFetch_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, rayTracingPositionFetch{ rayTracingPositionFetch_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPositionFetchFeaturesKHR( PhysicalDeviceRayTracingPositionFetchFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceRayTracingPositionFetchFeaturesKHR( VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceRayTracingPositionFetchFeaturesKHR( *reinterpret_cast<PhysicalDeviceRayTracingPositionFetchFeaturesKHR const *>( &rhs ) )
{}
PhysicalDeviceRayTracingPositionFetchFeaturesKHR & operator=( PhysicalDeviceRayTracingPositionFetchFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceRayTracingPositionFetchFeaturesKHR & operator=( VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPositionFetchFeaturesKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPositionFetchFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPositionFetchFeaturesKHR & setRayTracingPositionFetch( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPositionFetch_ ) VULKAN_HPP_NOEXCEPT
{
rayTracingPositionFetch = rayTracingPositionFetch_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR*>( this );
}
operator VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR*>( this );
}
operator VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR*>( this );
}
operator VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, rayTracingPositionFetch );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceRayTracingPositionFetchFeaturesKHR const & ) const = default;
#else
bool operator==( PhysicalDeviceRayTracingPositionFetchFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( rayTracingPositionFetch == rhs.rayTracingPositionFetch );
#endif
}
bool operator!=( PhysicalDeviceRayTracingPositionFetchFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingPositionFetchFeaturesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 rayTracingPositionFetch = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceRayTracingPositionFetchFeaturesKHR>
{
using Type = PhysicalDeviceRayTracingPositionFetchFeaturesKHR;
};
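  // Note (illustrative, following the VK_KHR_ray_tracing_position_fetch semantics): besides
  // enabling rayTracingPositionFetch, an acceleration structure whose vertex positions are to
  // be fetched must be built with the corresponding data-access build flag, e.g.:
  //
  //   buildInfo.flags |= vk::BuildAccelerationStructureFlagBitsKHR::eAllowDataAccess;  // `buildInfo` is an assumed AccelerationStructureBuildGeometryInfoKHR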
// wrapper struct for struct VkPhysicalDeviceRayTracingPropertiesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRayTracingPropertiesNV.html
struct PhysicalDeviceRayTracingPropertiesNV
{
using NativeType = VkPhysicalDeviceRayTracingPropertiesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingPropertiesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPropertiesNV(uint32_t shaderGroupHandleSize_ = {}, uint32_t maxRecursionDepth_ = {}, uint32_t maxShaderGroupStride_ = {}, uint32_t shaderGroupBaseAlignment_ = {}, uint64_t maxGeometryCount_ = {}, uint64_t maxInstanceCount_ = {}, uint64_t maxTriangleCount_ = {}, uint32_t maxDescriptorSetAccelerationStructures_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, shaderGroupHandleSize{ shaderGroupHandleSize_ }, maxRecursionDepth{ maxRecursionDepth_ }, maxShaderGroupStride{ maxShaderGroupStride_ }, shaderGroupBaseAlignment{ shaderGroupBaseAlignment_ }, maxGeometryCount{ maxGeometryCount_ }, maxInstanceCount{ maxInstanceCount_ }, maxTriangleCount{ maxTriangleCount_ }, maxDescriptorSetAccelerationStructures{ maxDescriptorSetAccelerationStructures_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPropertiesNV( PhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceRayTracingPropertiesNV( VkPhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceRayTracingPropertiesNV( *reinterpret_cast<PhysicalDeviceRayTracingPropertiesNV const *>( &rhs ) )
{}
PhysicalDeviceRayTracingPropertiesNV & operator=( PhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceRayTracingPropertiesNV & operator=( VkPhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesNV const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceRayTracingPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceRayTracingPropertiesNV*>( this );
}
operator VkPhysicalDeviceRayTracingPropertiesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceRayTracingPropertiesNV*>( this );
}
operator VkPhysicalDeviceRayTracingPropertiesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceRayTracingPropertiesNV*>( this );
}
operator VkPhysicalDeviceRayTracingPropertiesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceRayTracingPropertiesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint64_t const &, uint64_t const &, uint64_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, shaderGroupHandleSize, maxRecursionDepth, maxShaderGroupStride, shaderGroupBaseAlignment, maxGeometryCount, maxInstanceCount, maxTriangleCount, maxDescriptorSetAccelerationStructures );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceRayTracingPropertiesNV const & ) const = default;
#else
bool operator==( PhysicalDeviceRayTracingPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( shaderGroupHandleSize == rhs.shaderGroupHandleSize )
&& ( maxRecursionDepth == rhs.maxRecursionDepth )
&& ( maxShaderGroupStride == rhs.maxShaderGroupStride )
&& ( shaderGroupBaseAlignment == rhs.shaderGroupBaseAlignment )
&& ( maxGeometryCount == rhs.maxGeometryCount )
&& ( maxInstanceCount == rhs.maxInstanceCount )
&& ( maxTriangleCount == rhs.maxTriangleCount )
&& ( maxDescriptorSetAccelerationStructures == rhs.maxDescriptorSetAccelerationStructures );
#endif
}
bool operator!=( PhysicalDeviceRayTracingPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingPropertiesNV;
void * pNext = {};
uint32_t shaderGroupHandleSize = {};
uint32_t maxRecursionDepth = {};
uint32_t maxShaderGroupStride = {};
uint32_t shaderGroupBaseAlignment = {};
uint64_t maxGeometryCount = {};
uint64_t maxInstanceCount = {};
uint64_t maxTriangleCount = {};
uint32_t maxDescriptorSetAccelerationStructures = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceRayTracingPropertiesNV>
{
using Type = PhysicalDeviceRayTracingPropertiesNV;
};
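  // Portability note (sketch; `nvProps` / `khrProps` are assumed to have been queried from the
  // corresponding structures above): the legacy NV struct names some limits differently from the
  // KHR one, so code supporting both paths typically normalizes them, e.g.:
  //
  //   uint32_t maxDepth = useKhrPath ? khrProps.maxRayRecursionDepth : nvProps.maxRecursionDepth;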
// wrapper struct for struct VkPhysicalDeviceRayTracingValidationFeaturesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRayTracingValidationFeaturesNV.html
struct PhysicalDeviceRayTracingValidationFeaturesNV
{
using NativeType = VkPhysicalDeviceRayTracingValidationFeaturesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingValidationFeaturesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingValidationFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 rayTracingValidation_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, rayTracingValidation{ rayTracingValidation_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingValidationFeaturesNV( PhysicalDeviceRayTracingValidationFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceRayTracingValidationFeaturesNV( VkPhysicalDeviceRayTracingValidationFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceRayTracingValidationFeaturesNV( *reinterpret_cast<PhysicalDeviceRayTracingValidationFeaturesNV const *>( &rhs ) )
{}
PhysicalDeviceRayTracingValidationFeaturesNV & operator=( PhysicalDeviceRayTracingValidationFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceRayTracingValidationFeaturesNV & operator=( VkPhysicalDeviceRayTracingValidationFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingValidationFeaturesNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingValidationFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingValidationFeaturesNV & setRayTracingValidation( VULKAN_HPP_NAMESPACE::Bool32 rayTracingValidation_ ) VULKAN_HPP_NOEXCEPT
{
rayTracingValidation = rayTracingValidation_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceRayTracingValidationFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceRayTracingValidationFeaturesNV*>( this );
}
operator VkPhysicalDeviceRayTracingValidationFeaturesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceRayTracingValidationFeaturesNV*>( this );
}
operator VkPhysicalDeviceRayTracingValidationFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceRayTracingValidationFeaturesNV*>( this );
}
operator VkPhysicalDeviceRayTracingValidationFeaturesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceRayTracingValidationFeaturesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, rayTracingValidation );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceRayTracingValidationFeaturesNV const & ) const = default;
#else
bool operator==( PhysicalDeviceRayTracingValidationFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( rayTracingValidation == rhs.rayTracingValidation );
#endif
}
bool operator!=( PhysicalDeviceRayTracingValidationFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingValidationFeaturesNV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 rayTracingValidation = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceRayTracingValidationFeaturesNV>
{
using Type = PhysicalDeviceRayTracingValidationFeaturesNV;
};
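  // Sketch of opting in at device creation (comment only; all names illustrative): feature
  // structs double as inputs, so the same struct is chained into DeviceCreateInfo::pNext with
  // the desired members set to VK_TRUE before creating the device.
  //
  //   vk::PhysicalDeviceRayTracingValidationFeaturesNV validationFeatures{ VK_TRUE };
  //   vk::DeviceCreateInfo deviceCreateInfo{};
  //   deviceCreateInfo.pNext = &validationFeatures;   // enable ray tracing validation
  //   vk::Device device = physicalDevice.createDevice( deviceCreateInfo );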
// wrapper struct for struct VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG.html
struct PhysicalDeviceRelaxedLineRasterizationFeaturesIMG
{
using NativeType = VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRelaxedLineRasterizationFeaturesIMG;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceRelaxedLineRasterizationFeaturesIMG(VULKAN_HPP_NAMESPACE::Bool32 relaxedLineRasterization_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, relaxedLineRasterization{ relaxedLineRasterization_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceRelaxedLineRasterizationFeaturesIMG( PhysicalDeviceRelaxedLineRasterizationFeaturesIMG const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceRelaxedLineRasterizationFeaturesIMG( VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceRelaxedLineRasterizationFeaturesIMG( *reinterpret_cast<PhysicalDeviceRelaxedLineRasterizationFeaturesIMG const *>( &rhs ) )
{}
PhysicalDeviceRelaxedLineRasterizationFeaturesIMG & operator=( PhysicalDeviceRelaxedLineRasterizationFeaturesIMG const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceRelaxedLineRasterizationFeaturesIMG & operator=( VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRelaxedLineRasterizationFeaturesIMG const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRelaxedLineRasterizationFeaturesIMG & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRelaxedLineRasterizationFeaturesIMG & setRelaxedLineRasterization( VULKAN_HPP_NAMESPACE::Bool32 relaxedLineRasterization_ ) VULKAN_HPP_NOEXCEPT
{
relaxedLineRasterization = relaxedLineRasterization_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG*>( this );
}
operator VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG*>( this );
}
operator VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG*>( this );
}
operator VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, relaxedLineRasterization );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceRelaxedLineRasterizationFeaturesIMG const & ) const = default;
#else
bool operator==( PhysicalDeviceRelaxedLineRasterizationFeaturesIMG const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( relaxedLineRasterization == rhs.relaxedLineRasterization );
#endif
}
bool operator!=( PhysicalDeviceRelaxedLineRasterizationFeaturesIMG const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRelaxedLineRasterizationFeaturesIMG;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 relaxedLineRasterization = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceRelaxedLineRasterizationFeaturesIMG>
{
using Type = PhysicalDeviceRelaxedLineRasterizationFeaturesIMG;
};
// wrapper struct for struct VkPhysicalDeviceRenderPassStripedFeaturesARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRenderPassStripedFeaturesARM.html
struct PhysicalDeviceRenderPassStripedFeaturesARM
{
using NativeType = VkPhysicalDeviceRenderPassStripedFeaturesARM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRenderPassStripedFeaturesARM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceRenderPassStripedFeaturesARM(VULKAN_HPP_NAMESPACE::Bool32 renderPassStriped_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, renderPassStriped{ renderPassStriped_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceRenderPassStripedFeaturesARM( PhysicalDeviceRenderPassStripedFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceRenderPassStripedFeaturesARM( VkPhysicalDeviceRenderPassStripedFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceRenderPassStripedFeaturesARM( *reinterpret_cast<PhysicalDeviceRenderPassStripedFeaturesARM const *>( &rhs ) )
{}
PhysicalDeviceRenderPassStripedFeaturesARM & operator=( PhysicalDeviceRenderPassStripedFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceRenderPassStripedFeaturesARM & operator=( VkPhysicalDeviceRenderPassStripedFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRenderPassStripedFeaturesARM const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRenderPassStripedFeaturesARM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRenderPassStripedFeaturesARM & setRenderPassStriped( VULKAN_HPP_NAMESPACE::Bool32 renderPassStriped_ ) VULKAN_HPP_NOEXCEPT
{
renderPassStriped = renderPassStriped_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceRenderPassStripedFeaturesARM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceRenderPassStripedFeaturesARM*>( this );
}
operator VkPhysicalDeviceRenderPassStripedFeaturesARM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceRenderPassStripedFeaturesARM*>( this );
}
operator VkPhysicalDeviceRenderPassStripedFeaturesARM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceRenderPassStripedFeaturesARM*>( this );
}
operator VkPhysicalDeviceRenderPassStripedFeaturesARM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceRenderPassStripedFeaturesARM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, renderPassStriped );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceRenderPassStripedFeaturesARM const & ) const = default;
#else
bool operator==( PhysicalDeviceRenderPassStripedFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( renderPassStriped == rhs.renderPassStriped );
#endif
}
bool operator!=( PhysicalDeviceRenderPassStripedFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRenderPassStripedFeaturesARM;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 renderPassStriped = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceRenderPassStripedFeaturesARM>
{
using Type = PhysicalDeviceRenderPassStripedFeaturesARM;
};
// wrapper struct for struct VkPhysicalDeviceRenderPassStripedPropertiesARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRenderPassStripedPropertiesARM.html
struct PhysicalDeviceRenderPassStripedPropertiesARM
{
using NativeType = VkPhysicalDeviceRenderPassStripedPropertiesARM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRenderPassStripedPropertiesARM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceRenderPassStripedPropertiesARM(VULKAN_HPP_NAMESPACE::Extent2D renderPassStripeGranularity_ = {}, uint32_t maxRenderPassStripes_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, renderPassStripeGranularity{ renderPassStripeGranularity_ }, maxRenderPassStripes{ maxRenderPassStripes_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceRenderPassStripedPropertiesARM( PhysicalDeviceRenderPassStripedPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceRenderPassStripedPropertiesARM( VkPhysicalDeviceRenderPassStripedPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceRenderPassStripedPropertiesARM( *reinterpret_cast<PhysicalDeviceRenderPassStripedPropertiesARM const *>( &rhs ) )
{}
PhysicalDeviceRenderPassStripedPropertiesARM & operator=( PhysicalDeviceRenderPassStripedPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceRenderPassStripedPropertiesARM & operator=( VkPhysicalDeviceRenderPassStripedPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRenderPassStripedPropertiesARM const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceRenderPassStripedPropertiesARM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceRenderPassStripedPropertiesARM*>( this );
}
operator VkPhysicalDeviceRenderPassStripedPropertiesARM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceRenderPassStripedPropertiesARM*>( this );
}
operator VkPhysicalDeviceRenderPassStripedPropertiesARM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceRenderPassStripedPropertiesARM*>( this );
}
operator VkPhysicalDeviceRenderPassStripedPropertiesARM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceRenderPassStripedPropertiesARM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Extent2D const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, renderPassStripeGranularity, maxRenderPassStripes );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceRenderPassStripedPropertiesARM const & ) const = default;
#else
bool operator==( PhysicalDeviceRenderPassStripedPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( renderPassStripeGranularity == rhs.renderPassStripeGranularity )
&& ( maxRenderPassStripes == rhs.maxRenderPassStripes );
#endif
}
bool operator!=( PhysicalDeviceRenderPassStripedPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRenderPassStripedPropertiesARM;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Extent2D renderPassStripeGranularity = {};
uint32_t maxRenderPassStripes = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceRenderPassStripedPropertiesARM>
{
using Type = PhysicalDeviceRenderPassStripedPropertiesARM;
};
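  // Sizing sketch (comment only; `stripeProps` is assumed to have been queried via
  // getProperties2): stripe offsets and sizes have to respect renderPassStripeGranularity, and
  // the total stripe count is capped by maxRenderPassStripes, so a simple horizontal split of an
  // assumed `renderAreaWidth` might look like:
  //
  //   uint32_t stripeWidth = stripeProps.renderPassStripeGranularity.width;
  //   uint32_t stripeCount = ( renderAreaWidth + stripeWidth - 1 ) / stripeWidth;  // ceil-divide
  //   VULKAN_HPP_ASSERT( stripeCount <= stripeProps.maxRenderPassStripes );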
// wrapper struct for struct VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV.html
struct PhysicalDeviceRepresentativeFragmentTestFeaturesNV
{
using NativeType = VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceRepresentativeFragmentTestFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, representativeFragmentTest{ representativeFragmentTest_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceRepresentativeFragmentTestFeaturesNV( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceRepresentativeFragmentTestFeaturesNV( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceRepresentativeFragmentTestFeaturesNV( *reinterpret_cast<PhysicalDeviceRepresentativeFragmentTestFeaturesNV const *>( &rhs ) )
{}
PhysicalDeviceRepresentativeFragmentTestFeaturesNV & operator=( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceRepresentativeFragmentTestFeaturesNV & operator=( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRepresentativeFragmentTestFeaturesNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRepresentativeFragmentTestFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRepresentativeFragmentTestFeaturesNV & setRepresentativeFragmentTest( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest_ ) VULKAN_HPP_NOEXCEPT
{
representativeFragmentTest = representativeFragmentTest_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV*>( this );
}
operator VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV*>( this );
}
operator VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV*>( this );
}
operator VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, representativeFragmentTest );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & ) const = default;
#else
bool operator==( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( representativeFragmentTest == rhs.representativeFragmentTest );
#endif
}
bool operator!=( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV>
{
using Type = PhysicalDeviceRepresentativeFragmentTestFeaturesNV;
};
// wrapper struct for struct VkPhysicalDeviceRobustness2FeaturesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRobustness2FeaturesKHR.html
struct PhysicalDeviceRobustness2FeaturesKHR
{
using NativeType = VkPhysicalDeviceRobustness2FeaturesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRobustness2FeaturesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2FeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess2_ = {}, VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess2_ = {}, VULKAN_HPP_NAMESPACE::Bool32 nullDescriptor_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, robustBufferAccess2{ robustBufferAccess2_ }, robustImageAccess2{ robustImageAccess2_ }, nullDescriptor{ nullDescriptor_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2FeaturesKHR( PhysicalDeviceRobustness2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceRobustness2FeaturesKHR( VkPhysicalDeviceRobustness2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceRobustness2FeaturesKHR( *reinterpret_cast<PhysicalDeviceRobustness2FeaturesKHR const *>( &rhs ) )
{}
PhysicalDeviceRobustness2FeaturesKHR & operator=( PhysicalDeviceRobustness2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceRobustness2FeaturesKHR & operator=( VkPhysicalDeviceRobustness2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2FeaturesKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRobustness2FeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRobustness2FeaturesKHR & setRobustBufferAccess2( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess2_ ) VULKAN_HPP_NOEXCEPT
{
robustBufferAccess2 = robustBufferAccess2_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRobustness2FeaturesKHR & setRobustImageAccess2( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess2_ ) VULKAN_HPP_NOEXCEPT
{
robustImageAccess2 = robustImageAccess2_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRobustness2FeaturesKHR & setNullDescriptor( VULKAN_HPP_NAMESPACE::Bool32 nullDescriptor_ ) VULKAN_HPP_NOEXCEPT
{
nullDescriptor = nullDescriptor_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceRobustness2FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceRobustness2FeaturesKHR*>( this );
}
operator VkPhysicalDeviceRobustness2FeaturesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceRobustness2FeaturesKHR*>( this );
}
operator VkPhysicalDeviceRobustness2FeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceRobustness2FeaturesKHR*>( this );
}
operator VkPhysicalDeviceRobustness2FeaturesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceRobustness2FeaturesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, robustBufferAccess2, robustImageAccess2, nullDescriptor );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceRobustness2FeaturesKHR const & ) const = default;
#else
bool operator==( PhysicalDeviceRobustness2FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( robustBufferAccess2 == rhs.robustBufferAccess2 )
&& ( robustImageAccess2 == rhs.robustImageAccess2 )
&& ( nullDescriptor == rhs.nullDescriptor );
#endif
}
bool operator!=( PhysicalDeviceRobustness2FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRobustness2FeaturesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess2 = {};
VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess2 = {};
VULKAN_HPP_NAMESPACE::Bool32 nullDescriptor = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceRobustness2FeaturesKHR>
{
using Type = PhysicalDeviceRobustness2FeaturesKHR;
};
using PhysicalDeviceRobustness2FeaturesEXT = PhysicalDeviceRobustness2FeaturesKHR;
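  // Sketch of the generated fluent setters (comment only; values illustrative): each setter
  // returns *this, so a feature struct can be configured in one expression and then chained into
  // DeviceCreateInfo::pNext like any other feature struct.
  //
  //   auto robustness2 = vk::PhysicalDeviceRobustness2FeaturesKHR{}
  //                        .setRobustBufferAccess2( VK_TRUE )
  //                        .setNullDescriptor( VK_TRUE );  // allow binding VK_NULL_HANDLE descriptors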
// wrapper struct for struct VkPhysicalDeviceRobustness2PropertiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRobustness2PropertiesKHR.html
struct PhysicalDeviceRobustness2PropertiesKHR
{
using NativeType = VkPhysicalDeviceRobustness2PropertiesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRobustness2PropertiesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2PropertiesKHR(VULKAN_HPP_NAMESPACE::DeviceSize robustStorageBufferAccessSizeAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize robustUniformBufferAccessSizeAlignment_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, robustStorageBufferAccessSizeAlignment{ robustStorageBufferAccessSizeAlignment_ }, robustUniformBufferAccessSizeAlignment{ robustUniformBufferAccessSizeAlignment_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2PropertiesKHR( PhysicalDeviceRobustness2PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceRobustness2PropertiesKHR( VkPhysicalDeviceRobustness2PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceRobustness2PropertiesKHR( *reinterpret_cast<PhysicalDeviceRobustness2PropertiesKHR const *>( &rhs ) )
{}
PhysicalDeviceRobustness2PropertiesKHR & operator=( PhysicalDeviceRobustness2PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceRobustness2PropertiesKHR & operator=( VkPhysicalDeviceRobustness2PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2PropertiesKHR const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceRobustness2PropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceRobustness2PropertiesKHR*>( this );
}
operator VkPhysicalDeviceRobustness2PropertiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceRobustness2PropertiesKHR*>( this );
}
operator VkPhysicalDeviceRobustness2PropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceRobustness2PropertiesKHR*>( this );
}
operator VkPhysicalDeviceRobustness2PropertiesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceRobustness2PropertiesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, robustStorageBufferAccessSizeAlignment, robustUniformBufferAccessSizeAlignment );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceRobustness2PropertiesKHR const & ) const = default;
#else
bool operator==( PhysicalDeviceRobustness2PropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( robustStorageBufferAccessSizeAlignment == rhs.robustStorageBufferAccessSizeAlignment )
&& ( robustUniformBufferAccessSizeAlignment == rhs.robustUniformBufferAccessSizeAlignment );
#endif
}
bool operator!=( PhysicalDeviceRobustness2PropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRobustness2PropertiesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::DeviceSize robustStorageBufferAccessSizeAlignment = {};
VULKAN_HPP_NAMESPACE::DeviceSize robustUniformBufferAccessSizeAlignment = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceRobustness2PropertiesKHR>
{
using Type = PhysicalDeviceRobustness2PropertiesKHR;
};
using PhysicalDeviceRobustness2PropertiesEXT = PhysicalDeviceRobustness2PropertiesKHR;
// wrapper struct for struct VkPhysicalDeviceSampleLocationsPropertiesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceSampleLocationsPropertiesEXT.html
struct PhysicalDeviceSampleLocationsPropertiesEXT
{
using NativeType = VkPhysicalDeviceSampleLocationsPropertiesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSampleLocationsPropertiesEXT(VULKAN_HPP_NAMESPACE::SampleCountFlags sampleLocationSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize_ = {}, std::array<float,2> const & sampleLocationCoordinateRange_ = {}, uint32_t sampleLocationSubPixelBits_ = {}, VULKAN_HPP_NAMESPACE::Bool32 variableSampleLocations_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, sampleLocationSampleCounts{ sampleLocationSampleCounts_ }, maxSampleLocationGridSize{ maxSampleLocationGridSize_ }, sampleLocationCoordinateRange{ sampleLocationCoordinateRange_ }, sampleLocationSubPixelBits{ sampleLocationSubPixelBits_ }, variableSampleLocations{ variableSampleLocations_ }
{}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSampleLocationsPropertiesEXT( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceSampleLocationsPropertiesEXT( VkPhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceSampleLocationsPropertiesEXT( *reinterpret_cast<PhysicalDeviceSampleLocationsPropertiesEXT const *>( &rhs ) )
{}
PhysicalDeviceSampleLocationsPropertiesEXT & operator=( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceSampleLocationsPropertiesEXT & operator=( VkPhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceSampleLocationsPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceSampleLocationsPropertiesEXT*>( this );
}
operator VkPhysicalDeviceSampleLocationsPropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceSampleLocationsPropertiesEXT*>( this );
}
operator VkPhysicalDeviceSampleLocationsPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceSampleLocationsPropertiesEXT*>( this );
}
operator VkPhysicalDeviceSampleLocationsPropertiesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceSampleLocationsPropertiesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::SampleCountFlags const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 2> const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, sampleLocationSampleCounts, maxSampleLocationGridSize, sampleLocationCoordinateRange, sampleLocationSubPixelBits, variableSampleLocations );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceSampleLocationsPropertiesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( sampleLocationSampleCounts == rhs.sampleLocationSampleCounts )
&& ( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize )
&& ( sampleLocationCoordinateRange == rhs.sampleLocationCoordinateRange )
&& ( sampleLocationSubPixelBits == rhs.sampleLocationSubPixelBits )
&& ( variableSampleLocations == rhs.variableSampleLocations );
#endif
}
bool operator!=( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::SampleCountFlags sampleLocationSampleCounts = {};
VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 2> sampleLocationCoordinateRange = {};
uint32_t sampleLocationSubPixelBits = {};
VULKAN_HPP_NAMESPACE::Bool32 variableSampleLocations = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT>
{
using Type = PhysicalDeviceSampleLocationsPropertiesEXT;
};
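  // Query sketch using the StructureChain helper (comment only; `physicalDevice` assumed):
  // getProperties2 can return a chain directly, and ArrayWrapper1D members index like plain arrays.
  //
  //   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
  //                                              vk::PhysicalDeviceSampleLocationsPropertiesEXT>();
  //   auto const & slProps = chain.get<vk::PhysicalDeviceSampleLocationsPropertiesEXT>();
  //   bool  fourSamples = static_cast<bool>( slProps.sampleLocationSampleCounts & vk::SampleCountFlagBits::e4 );
  //   float minCoord    = slProps.sampleLocationCoordinateRange[0];
  //   float maxCoord    = slProps.sampleLocationCoordinateRange[1];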
// wrapper struct for struct VkPhysicalDeviceSamplerFilterMinmaxProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceSamplerFilterMinmaxProperties.html
struct PhysicalDeviceSamplerFilterMinmaxProperties
{
using NativeType = VkPhysicalDeviceSamplerFilterMinmaxProperties;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerFilterMinmaxProperties(VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats_ = {}, VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, filterMinmaxSingleComponentFormats{ filterMinmaxSingleComponentFormats_ }, filterMinmaxImageComponentMapping{ filterMinmaxImageComponentMapping_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerFilterMinmaxProperties( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceSamplerFilterMinmaxProperties( VkPhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceSamplerFilterMinmaxProperties( *reinterpret_cast<PhysicalDeviceSamplerFilterMinmaxProperties const *>( &rhs ) )
{}
PhysicalDeviceSamplerFilterMinmaxProperties & operator=( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceSamplerFilterMinmaxProperties & operator=( VkPhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxProperties const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceSamplerFilterMinmaxProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceSamplerFilterMinmaxProperties*>( this );
}
operator VkPhysicalDeviceSamplerFilterMinmaxProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceSamplerFilterMinmaxProperties*>( this );
}
operator VkPhysicalDeviceSamplerFilterMinmaxProperties const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceSamplerFilterMinmaxProperties*>( this );
}
operator VkPhysicalDeviceSamplerFilterMinmaxProperties *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceSamplerFilterMinmaxProperties*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, filterMinmaxSingleComponentFormats, filterMinmaxImageComponentMapping );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceSamplerFilterMinmaxProperties const & ) const = default;
#else
bool operator==( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( filterMinmaxSingleComponentFormats == rhs.filterMinmaxSingleComponentFormats )
&& ( filterMinmaxImageComponentMapping == rhs.filterMinmaxImageComponentMapping );
#endif
}
bool operator!=( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats = {};
VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties>
{
using Type = PhysicalDeviceSamplerFilterMinmaxProperties;
};
using PhysicalDeviceSamplerFilterMinmaxPropertiesEXT = PhysicalDeviceSamplerFilterMinmaxProperties;
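  // Usage sketch (comment only; names illustrative): when filterMinmaxSingleComponentFormats is
  // VK_TRUE, a sampler can request min/max reduction by chaining SamplerReductionModeCreateInfo.
  //
  //   vk::SamplerReductionModeCreateInfo reduction{ vk::SamplerReductionMode::eMin };
  //   vk::SamplerCreateInfo samplerCreateInfo{};
  //   samplerCreateInfo.pNext = &reduction;  // sampler returns the minimum of the fetched texels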
// wrapper struct for struct VkPhysicalDeviceSamplerYcbcrConversionFeatures, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceSamplerYcbcrConversionFeatures.html
struct PhysicalDeviceSamplerYcbcrConversionFeatures
{
using NativeType = VkPhysicalDeviceSamplerYcbcrConversionFeatures;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerYcbcrConversionFeatures(VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, samplerYcbcrConversion{ samplerYcbcrConversion_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerYcbcrConversionFeatures( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceSamplerYcbcrConversionFeatures( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceSamplerYcbcrConversionFeatures( *reinterpret_cast<PhysicalDeviceSamplerYcbcrConversionFeatures const *>( &rhs ) )
{}
PhysicalDeviceSamplerYcbcrConversionFeatures & operator=( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceSamplerYcbcrConversionFeatures & operator=( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSamplerYcbcrConversionFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSamplerYcbcrConversionFeatures & setSamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ ) VULKAN_HPP_NOEXCEPT
{
samplerYcbcrConversion = samplerYcbcrConversion_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceSamplerYcbcrConversionFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceSamplerYcbcrConversionFeatures*>( this );
}
operator VkPhysicalDeviceSamplerYcbcrConversionFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceSamplerYcbcrConversionFeatures*>( this );
}
operator VkPhysicalDeviceSamplerYcbcrConversionFeatures const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceSamplerYcbcrConversionFeatures*>( this );
}
operator VkPhysicalDeviceSamplerYcbcrConversionFeatures *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceSamplerYcbcrConversionFeatures*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, samplerYcbcrConversion );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceSamplerYcbcrConversionFeatures const & ) const = default;
#else
bool operator==( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( samplerYcbcrConversion == rhs.samplerYcbcrConversion );
#endif
}
bool operator!=( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures>
{
using Type = PhysicalDeviceSamplerYcbcrConversionFeatures;
};
using PhysicalDeviceSamplerYcbcrConversionFeaturesKHR = PhysicalDeviceSamplerYcbcrConversionFeatures;
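  // Note (sketch, requires <type_traits>): the KHR alias above exists so code written against the
  // extension name keeps compiling once the feature is promoted to core, e.g.:
  //
  //   static_assert( std::is_same<vk::PhysicalDeviceSamplerYcbcrConversionFeaturesKHR,
  //                               vk::PhysicalDeviceSamplerYcbcrConversionFeatures>::value,
  //                  "KHR name is an alias of the core struct" );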
// wrapper struct for struct VkPhysicalDeviceScalarBlockLayoutFeatures, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceScalarBlockLayoutFeatures.html
struct PhysicalDeviceScalarBlockLayoutFeatures
{
using NativeType = VkPhysicalDeviceScalarBlockLayoutFeatures;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceScalarBlockLayoutFeatures;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceScalarBlockLayoutFeatures(VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, scalarBlockLayout{ scalarBlockLayout_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceScalarBlockLayoutFeatures( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceScalarBlockLayoutFeatures( VkPhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceScalarBlockLayoutFeatures( *reinterpret_cast<PhysicalDeviceScalarBlockLayoutFeatures const *>( &rhs ) )
{}
PhysicalDeviceScalarBlockLayoutFeatures & operator=( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceScalarBlockLayoutFeatures & operator=( VkPhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeatures const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceScalarBlockLayoutFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceScalarBlockLayoutFeatures & setScalarBlockLayout( VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ ) VULKAN_HPP_NOEXCEPT
{
scalarBlockLayout = scalarBlockLayout_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceScalarBlockLayoutFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceScalarBlockLayoutFeatures*>( this );
}
operator VkPhysicalDeviceScalarBlockLayoutFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceScalarBlockLayoutFeatures*>( this );
}
operator VkPhysicalDeviceScalarBlockLayoutFeatures const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceScalarBlockLayoutFeatures*>( this );
}
operator VkPhysicalDeviceScalarBlockLayoutFeatures *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceScalarBlockLayoutFeatures*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, scalarBlockLayout );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceScalarBlockLayoutFeatures const & ) const = default;
#else
bool operator==( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( scalarBlockLayout == rhs.scalarBlockLayout );
#endif
}
bool operator!=( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceScalarBlockLayoutFeatures;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceScalarBlockLayoutFeatures>
{
using Type = PhysicalDeviceScalarBlockLayoutFeatures;
};
using PhysicalDeviceScalarBlockLayoutFeaturesEXT = PhysicalDeviceScalarBlockLayoutFeatures;
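  // Usage sketch (illustrative, not part of the generated API): the same feature structs are
  // used to enable a supported feature by chaining them into DeviceCreateInfo::pNext before
  // device creation. "physicalDevice" and "queueCreateInfo" are assumed to exist and be valid;
  // queue setup is elided.
  //
  //   VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeatures scalarBlockLayoutFeatures( VK_TRUE );
  //   VULKAN_HPP_NAMESPACE::DeviceCreateInfo deviceCreateInfo;
  //   deviceCreateInfo.setQueueCreateInfos( queueCreateInfo );  // a single lvalue binds to the ArrayProxy setter
  //   deviceCreateInfo.setPNext( &scalarBlockLayoutFeatures );
  //   VULKAN_HPP_NAMESPACE::Device device = physicalDevice.createDevice( deviceCreateInfo );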
// wrapper struct for struct VkPhysicalDeviceSchedulingControlsFeaturesARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceSchedulingControlsFeaturesARM.html
struct PhysicalDeviceSchedulingControlsFeaturesARM
{
using NativeType = VkPhysicalDeviceSchedulingControlsFeaturesARM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSchedulingControlsFeaturesARM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceSchedulingControlsFeaturesARM(VULKAN_HPP_NAMESPACE::Bool32 schedulingControls_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, schedulingControls{ schedulingControls_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceSchedulingControlsFeaturesARM( PhysicalDeviceSchedulingControlsFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceSchedulingControlsFeaturesARM( VkPhysicalDeviceSchedulingControlsFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceSchedulingControlsFeaturesARM( *reinterpret_cast<PhysicalDeviceSchedulingControlsFeaturesARM const *>( &rhs ) )
{}
PhysicalDeviceSchedulingControlsFeaturesARM & operator=( PhysicalDeviceSchedulingControlsFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceSchedulingControlsFeaturesARM & operator=( VkPhysicalDeviceSchedulingControlsFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsFeaturesARM const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSchedulingControlsFeaturesARM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSchedulingControlsFeaturesARM & setSchedulingControls( VULKAN_HPP_NAMESPACE::Bool32 schedulingControls_ ) VULKAN_HPP_NOEXCEPT
{
schedulingControls = schedulingControls_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceSchedulingControlsFeaturesARM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceSchedulingControlsFeaturesARM*>( this );
}
operator VkPhysicalDeviceSchedulingControlsFeaturesARM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceSchedulingControlsFeaturesARM*>( this );
}
operator VkPhysicalDeviceSchedulingControlsFeaturesARM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceSchedulingControlsFeaturesARM*>( this );
}
operator VkPhysicalDeviceSchedulingControlsFeaturesARM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceSchedulingControlsFeaturesARM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, schedulingControls );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceSchedulingControlsFeaturesARM const & ) const = default;
#else
bool operator==( PhysicalDeviceSchedulingControlsFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( schedulingControls == rhs.schedulingControls );
#endif
}
bool operator!=( PhysicalDeviceSchedulingControlsFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSchedulingControlsFeaturesARM;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 schedulingControls = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceSchedulingControlsFeaturesARM>
{
using Type = PhysicalDeviceSchedulingControlsFeaturesARM;
};
// wrapper struct for struct VkPhysicalDeviceSchedulingControlsPropertiesARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceSchedulingControlsPropertiesARM.html
struct PhysicalDeviceSchedulingControlsPropertiesARM
{
using NativeType = VkPhysicalDeviceSchedulingControlsPropertiesARM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSchedulingControlsPropertiesARM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceSchedulingControlsPropertiesARM(VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsFlagsARM schedulingControlsFlags_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, schedulingControlsFlags{ schedulingControlsFlags_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceSchedulingControlsPropertiesARM( PhysicalDeviceSchedulingControlsPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceSchedulingControlsPropertiesARM( VkPhysicalDeviceSchedulingControlsPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceSchedulingControlsPropertiesARM( *reinterpret_cast<PhysicalDeviceSchedulingControlsPropertiesARM const *>( &rhs ) )
{}
PhysicalDeviceSchedulingControlsPropertiesARM & operator=( PhysicalDeviceSchedulingControlsPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceSchedulingControlsPropertiesARM & operator=( VkPhysicalDeviceSchedulingControlsPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsPropertiesARM const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceSchedulingControlsPropertiesARM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceSchedulingControlsPropertiesARM*>( this );
}
operator VkPhysicalDeviceSchedulingControlsPropertiesARM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceSchedulingControlsPropertiesARM*>( this );
}
operator VkPhysicalDeviceSchedulingControlsPropertiesARM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceSchedulingControlsPropertiesARM*>( this );
}
operator VkPhysicalDeviceSchedulingControlsPropertiesARM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceSchedulingControlsPropertiesARM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsFlagsARM const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, schedulingControlsFlags );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceSchedulingControlsPropertiesARM const & ) const = default;
#else
bool operator==( PhysicalDeviceSchedulingControlsPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( schedulingControlsFlags == rhs.schedulingControlsFlags );
#endif
}
bool operator!=( PhysicalDeviceSchedulingControlsPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSchedulingControlsPropertiesARM;
void * pNext = {};
VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsFlagsARM schedulingControlsFlags = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceSchedulingControlsPropertiesARM>
{
using Type = PhysicalDeviceSchedulingControlsPropertiesARM;
};
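  // Usage sketch (illustrative, not part of the generated API): properties structs are filled by
  // the implementation rather than by the application, which is why no setter block is generated
  // for this one. They are read back through a getProperties2 chain; "physicalDevice" is an
  // assumed, valid PhysicalDevice.
  //
  //   auto chain = physicalDevice.getProperties2<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2,
  //                                              VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsPropertiesARM>();
  //   VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsFlagsARM schedulingControlsFlags =
  //     chain.get<VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsPropertiesARM>().schedulingControlsFlags;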
// wrapper struct for struct VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures.html
struct PhysicalDeviceSeparateDepthStencilLayoutsFeatures
{
using NativeType = VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceSeparateDepthStencilLayoutsFeatures(VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, separateDepthStencilLayouts{ separateDepthStencilLayouts_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceSeparateDepthStencilLayoutsFeatures( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceSeparateDepthStencilLayoutsFeatures( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceSeparateDepthStencilLayoutsFeatures( *reinterpret_cast<PhysicalDeviceSeparateDepthStencilLayoutsFeatures const *>( &rhs ) )
{}
PhysicalDeviceSeparateDepthStencilLayoutsFeatures & operator=( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceSeparateDepthStencilLayoutsFeatures & operator=( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeatures const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSeparateDepthStencilLayoutsFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSeparateDepthStencilLayoutsFeatures & setSeparateDepthStencilLayouts( VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ ) VULKAN_HPP_NOEXCEPT
{
separateDepthStencilLayouts = separateDepthStencilLayouts_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures*>( this );
}
operator VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures*>( this );
}
operator VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures*>( this );
}
operator VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, separateDepthStencilLayouts );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & ) const = default;
#else
bool operator==( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( separateDepthStencilLayouts == rhs.separateDepthStencilLayouts );
#endif
}
bool operator!=( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures>
{
using Type = PhysicalDeviceSeparateDepthStencilLayoutsFeatures;
};
using PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR = PhysicalDeviceSeparateDepthStencilLayoutsFeatures;
// wrapper struct for struct VkPhysicalDeviceShaderAtomicFloat16VectorFeaturesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderAtomicFloat16VectorFeaturesNV.html
struct PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV
{
using NativeType = VkPhysicalDeviceShaderAtomicFloat16VectorFeaturesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderAtomicFloat16VectorFeaturesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16VectorAtomics_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, shaderFloat16VectorAtomics{ shaderFloat16VectorAtomics_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV( PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV( VkPhysicalDeviceShaderAtomicFloat16VectorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV( *reinterpret_cast<PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV const *>( &rhs ) )
{}
PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV & operator=( PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV & operator=( VkPhysicalDeviceShaderAtomicFloat16VectorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV & setShaderFloat16VectorAtomics( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16VectorAtomics_ ) VULKAN_HPP_NOEXCEPT
{
shaderFloat16VectorAtomics = shaderFloat16VectorAtomics_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceShaderAtomicFloat16VectorFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderAtomicFloat16VectorFeaturesNV*>( this );
}
operator VkPhysicalDeviceShaderAtomicFloat16VectorFeaturesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderAtomicFloat16VectorFeaturesNV*>( this );
}
operator VkPhysicalDeviceShaderAtomicFloat16VectorFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceShaderAtomicFloat16VectorFeaturesNV*>( this );
}
operator VkPhysicalDeviceShaderAtomicFloat16VectorFeaturesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceShaderAtomicFloat16VectorFeaturesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, shaderFloat16VectorAtomics );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV const & ) const = default;
#else
bool operator==( PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( shaderFloat16VectorAtomics == rhs.shaderFloat16VectorAtomics );
#endif
}
bool operator!=( PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderAtomicFloat16VectorFeaturesNV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16VectorAtomics = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderAtomicFloat16VectorFeaturesNV>
{
using Type = PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV;
};
// wrapper struct for struct VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT.html
struct PhysicalDeviceShaderAtomicFloat2FeaturesEXT
{
using NativeType = VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderAtomicFloat2FeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloat2FeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicAdd_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicMinMax_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicMinMax_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicMinMax_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16AtomicAdd_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16AtomicMinMax_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicMinMax_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicMinMax_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicMinMax_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicMinMax_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, shaderBufferFloat16Atomics{ shaderBufferFloat16Atomics_ }, shaderBufferFloat16AtomicAdd{ shaderBufferFloat16AtomicAdd_ }, shaderBufferFloat16AtomicMinMax{ shaderBufferFloat16AtomicMinMax_ }, shaderBufferFloat32AtomicMinMax{ shaderBufferFloat32AtomicMinMax_ }, shaderBufferFloat64AtomicMinMax{ shaderBufferFloat64AtomicMinMax_ }, shaderSharedFloat16Atomics{ shaderSharedFloat16Atomics_ }, shaderSharedFloat16AtomicAdd{ shaderSharedFloat16AtomicAdd_ }, shaderSharedFloat16AtomicMinMax{ shaderSharedFloat16AtomicMinMax_ }, shaderSharedFloat32AtomicMinMax{ shaderSharedFloat32AtomicMinMax_ }, shaderSharedFloat64AtomicMinMax{ shaderSharedFloat64AtomicMinMax_ }, shaderImageFloat32AtomicMinMax{ shaderImageFloat32AtomicMinMax_ }, sparseImageFloat32AtomicMinMax{ sparseImageFloat32AtomicMinMax_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloat2FeaturesEXT( PhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderAtomicFloat2FeaturesEXT( VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceShaderAtomicFloat2FeaturesEXT( *reinterpret_cast<PhysicalDeviceShaderAtomicFloat2FeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceShaderAtomicFloat2FeaturesEXT & operator=( PhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceShaderAtomicFloat2FeaturesEXT & operator=( VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat2FeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setShaderBufferFloat16Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16Atomics_ ) VULKAN_HPP_NOEXCEPT
{
shaderBufferFloat16Atomics = shaderBufferFloat16Atomics_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setShaderBufferFloat16AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
{
shaderBufferFloat16AtomicAdd = shaderBufferFloat16AtomicAdd_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setShaderBufferFloat16AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT
{
shaderBufferFloat16AtomicMinMax = shaderBufferFloat16AtomicMinMax_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setShaderBufferFloat32AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT
{
shaderBufferFloat32AtomicMinMax = shaderBufferFloat32AtomicMinMax_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setShaderBufferFloat64AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT
{
shaderBufferFloat64AtomicMinMax = shaderBufferFloat64AtomicMinMax_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setShaderSharedFloat16Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16Atomics_ ) VULKAN_HPP_NOEXCEPT
{
shaderSharedFloat16Atomics = shaderSharedFloat16Atomics_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setShaderSharedFloat16AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
{
shaderSharedFloat16AtomicAdd = shaderSharedFloat16AtomicAdd_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setShaderSharedFloat16AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT
{
shaderSharedFloat16AtomicMinMax = shaderSharedFloat16AtomicMinMax_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setShaderSharedFloat32AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT
{
shaderSharedFloat32AtomicMinMax = shaderSharedFloat32AtomicMinMax_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setShaderSharedFloat64AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT
{
shaderSharedFloat64AtomicMinMax = shaderSharedFloat64AtomicMinMax_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setShaderImageFloat32AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT
{
shaderImageFloat32AtomicMinMax = shaderImageFloat32AtomicMinMax_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setSparseImageFloat32AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT
{
sparseImageFloat32AtomicMinMax = sparseImageFloat32AtomicMinMax_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT*>( this );
}
operator VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT*>( this );
}
operator VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT*>( this );
}
operator VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, shaderBufferFloat16Atomics, shaderBufferFloat16AtomicAdd, shaderBufferFloat16AtomicMinMax, shaderBufferFloat32AtomicMinMax, shaderBufferFloat64AtomicMinMax, shaderSharedFloat16Atomics, shaderSharedFloat16AtomicAdd, shaderSharedFloat16AtomicMinMax, shaderSharedFloat32AtomicMinMax, shaderSharedFloat64AtomicMinMax, shaderImageFloat32AtomicMinMax, sparseImageFloat32AtomicMinMax );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceShaderAtomicFloat2FeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( shaderBufferFloat16Atomics == rhs.shaderBufferFloat16Atomics )
&& ( shaderBufferFloat16AtomicAdd == rhs.shaderBufferFloat16AtomicAdd )
&& ( shaderBufferFloat16AtomicMinMax == rhs.shaderBufferFloat16AtomicMinMax )
&& ( shaderBufferFloat32AtomicMinMax == rhs.shaderBufferFloat32AtomicMinMax )
&& ( shaderBufferFloat64AtomicMinMax == rhs.shaderBufferFloat64AtomicMinMax )
&& ( shaderSharedFloat16Atomics == rhs.shaderSharedFloat16Atomics )
&& ( shaderSharedFloat16AtomicAdd == rhs.shaderSharedFloat16AtomicAdd )
&& ( shaderSharedFloat16AtomicMinMax == rhs.shaderSharedFloat16AtomicMinMax )
&& ( shaderSharedFloat32AtomicMinMax == rhs.shaderSharedFloat32AtomicMinMax )
&& ( shaderSharedFloat64AtomicMinMax == rhs.shaderSharedFloat64AtomicMinMax )
&& ( shaderImageFloat32AtomicMinMax == rhs.shaderImageFloat32AtomicMinMax )
&& ( sparseImageFloat32AtomicMinMax == rhs.sparseImageFloat32AtomicMinMax );
#endif
}
bool operator!=( PhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderAtomicFloat2FeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16Atomics = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicAdd = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicMinMax = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicMinMax = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicMinMax = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16Atomics = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16AtomicAdd = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16AtomicMinMax = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicMinMax = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicMinMax = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicMinMax = {};
VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicMinMax = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderAtomicFloat2FeaturesEXT>
{
using Type = PhysicalDeviceShaderAtomicFloat2FeaturesEXT;
};
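  // Usage sketch (illustrative, not part of the generated API): every setter returns *this, so a
  // feature request can be assembled as a fluent chain. Which members may legally be set to
  // VK_TRUE depends on what a prior getFeatures2 query reported; the combination below is purely
  // illustrative.
  //
  //   VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat2FeaturesEXT atomicFloat2Features;
  //   atomicFloat2Features.setShaderBufferFloat16Atomics( VK_TRUE )
  //                       .setShaderSharedFloat16AtomicMinMax( VK_TRUE );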
// wrapper struct for struct VkPhysicalDeviceShaderAtomicFloatFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderAtomicFloatFeaturesEXT.html
struct PhysicalDeviceShaderAtomicFloatFeaturesEXT
{
using NativeType = VkPhysicalDeviceShaderAtomicFloatFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloatFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicAdd_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicAdd_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicAdd_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicAdd_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicAdd_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicAdd_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, shaderBufferFloat32Atomics{ shaderBufferFloat32Atomics_ }, shaderBufferFloat32AtomicAdd{ shaderBufferFloat32AtomicAdd_ }, shaderBufferFloat64Atomics{ shaderBufferFloat64Atomics_ }, shaderBufferFloat64AtomicAdd{ shaderBufferFloat64AtomicAdd_ }, shaderSharedFloat32Atomics{ shaderSharedFloat32Atomics_ }, shaderSharedFloat32AtomicAdd{ shaderSharedFloat32AtomicAdd_ }, shaderSharedFloat64Atomics{ shaderSharedFloat64Atomics_ }, shaderSharedFloat64AtomicAdd{ shaderSharedFloat64AtomicAdd_ }, shaderImageFloat32Atomics{ shaderImageFloat32Atomics_ }, shaderImageFloat32AtomicAdd{ shaderImageFloat32AtomicAdd_ }, sparseImageFloat32Atomics{ sparseImageFloat32Atomics_ }, sparseImageFloat32AtomicAdd{ sparseImageFloat32AtomicAdd_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloatFeaturesEXT( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderAtomicFloatFeaturesEXT( VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceShaderAtomicFloatFeaturesEXT( *reinterpret_cast<PhysicalDeviceShaderAtomicFloatFeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceShaderAtomicFloatFeaturesEXT & operator=( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceShaderAtomicFloatFeaturesEXT & operator=( VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloatFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderBufferFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT
{
shaderBufferFloat32Atomics = shaderBufferFloat32Atomics_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderBufferFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
{
shaderBufferFloat32AtomicAdd = shaderBufferFloat32AtomicAdd_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderBufferFloat64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64Atomics_ ) VULKAN_HPP_NOEXCEPT
{
shaderBufferFloat64Atomics = shaderBufferFloat64Atomics_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderBufferFloat64AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
{
shaderBufferFloat64AtomicAdd = shaderBufferFloat64AtomicAdd_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderSharedFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT
{
shaderSharedFloat32Atomics = shaderSharedFloat32Atomics_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderSharedFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
{
shaderSharedFloat32AtomicAdd = shaderSharedFloat32AtomicAdd_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderSharedFloat64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64Atomics_ ) VULKAN_HPP_NOEXCEPT
{
shaderSharedFloat64Atomics = shaderSharedFloat64Atomics_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderSharedFloat64AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
{
shaderSharedFloat64AtomicAdd = shaderSharedFloat64AtomicAdd_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderImageFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT
{
shaderImageFloat32Atomics = shaderImageFloat32Atomics_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderImageFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
{
shaderImageFloat32AtomicAdd = shaderImageFloat32AtomicAdd_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setSparseImageFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT
{
sparseImageFloat32Atomics = sparseImageFloat32Atomics_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setSparseImageFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
{
sparseImageFloat32AtomicAdd = sparseImageFloat32AtomicAdd_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderAtomicFloatFeaturesEXT*>( this );
}
operator VkPhysicalDeviceShaderAtomicFloatFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderAtomicFloatFeaturesEXT*>( this );
}
operator VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceShaderAtomicFloatFeaturesEXT*>( this );
}
operator VkPhysicalDeviceShaderAtomicFloatFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceShaderAtomicFloatFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, shaderBufferFloat32Atomics, shaderBufferFloat32AtomicAdd, shaderBufferFloat64Atomics, shaderBufferFloat64AtomicAdd, shaderSharedFloat32Atomics, shaderSharedFloat32AtomicAdd, shaderSharedFloat64Atomics, shaderSharedFloat64AtomicAdd, shaderImageFloat32Atomics, shaderImageFloat32AtomicAdd, sparseImageFloat32Atomics, sparseImageFloat32AtomicAdd );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( shaderBufferFloat32Atomics == rhs.shaderBufferFloat32Atomics )
&& ( shaderBufferFloat32AtomicAdd == rhs.shaderBufferFloat32AtomicAdd )
&& ( shaderBufferFloat64Atomics == rhs.shaderBufferFloat64Atomics )
&& ( shaderBufferFloat64AtomicAdd == rhs.shaderBufferFloat64AtomicAdd )
&& ( shaderSharedFloat32Atomics == rhs.shaderSharedFloat32Atomics )
&& ( shaderSharedFloat32AtomicAdd == rhs.shaderSharedFloat32AtomicAdd )
&& ( shaderSharedFloat64Atomics == rhs.shaderSharedFloat64Atomics )
&& ( shaderSharedFloat64AtomicAdd == rhs.shaderSharedFloat64AtomicAdd )
&& ( shaderImageFloat32Atomics == rhs.shaderImageFloat32Atomics )
&& ( shaderImageFloat32AtomicAdd == rhs.shaderImageFloat32AtomicAdd )
&& ( sparseImageFloat32Atomics == rhs.sparseImageFloat32Atomics )
&& ( sparseImageFloat32AtomicAdd == rhs.sparseImageFloat32AtomicAdd );
#endif
}
bool operator!=( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32Atomics = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicAdd = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64Atomics = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicAdd = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32Atomics = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicAdd = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64Atomics = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicAdd = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32Atomics = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicAdd = {};
VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32Atomics = {};
VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicAdd = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT>
{
using Type = PhysicalDeviceShaderAtomicFloatFeaturesEXT;
};
// wrapper struct for struct VkPhysicalDeviceShaderAtomicInt64Features, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderAtomicInt64Features.html
struct PhysicalDeviceShaderAtomicInt64Features
{
using NativeType = VkPhysicalDeviceShaderAtomicInt64Features;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderAtomicInt64Features;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicInt64Features(VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, shaderBufferInt64Atomics{ shaderBufferInt64Atomics_ }, shaderSharedInt64Atomics{ shaderSharedInt64Atomics_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicInt64Features( PhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderAtomicInt64Features( VkPhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceShaderAtomicInt64Features( *reinterpret_cast<PhysicalDeviceShaderAtomicInt64Features const *>( &rhs ) )
{}
PhysicalDeviceShaderAtomicInt64Features & operator=( PhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceShaderAtomicInt64Features & operator=( VkPhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64Features const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicInt64Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicInt64Features & setShaderBufferInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
{
shaderBufferInt64Atomics = shaderBufferInt64Atomics_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicInt64Features & setShaderSharedInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
{
shaderSharedInt64Atomics = shaderSharedInt64Atomics_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceShaderAtomicInt64Features const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderAtomicInt64Features*>( this );
}
operator VkPhysicalDeviceShaderAtomicInt64Features &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderAtomicInt64Features*>( this );
}
operator VkPhysicalDeviceShaderAtomicInt64Features const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceShaderAtomicInt64Features*>( this );
}
operator VkPhysicalDeviceShaderAtomicInt64Features *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceShaderAtomicInt64Features*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, shaderBufferInt64Atomics, shaderSharedInt64Atomics );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceShaderAtomicInt64Features const & ) const = default;
#else
bool operator==( PhysicalDeviceShaderAtomicInt64Features const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( shaderBufferInt64Atomics == rhs.shaderBufferInt64Atomics )
&& ( shaderSharedInt64Atomics == rhs.shaderSharedInt64Atomics );
#endif
}
bool operator!=( PhysicalDeviceShaderAtomicInt64Features const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderAtomicInt64Features;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderAtomicInt64Features>
{
using Type = PhysicalDeviceShaderAtomicInt64Features;
};
using PhysicalDeviceShaderAtomicInt64FeaturesKHR = PhysicalDeviceShaderAtomicInt64Features;
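  // Usage sketch (illustrative, not part of the generated API): the KHR name above is a plain
  // alias for the core type, and the implicit conversion operators make the wrapper
  // interchangeable with its NativeType when calling the C API directly. "physicalDevice" is an
  // assumed, valid PhysicalDevice (itself implicitly convertible to VkPhysicalDevice).
  //
  //   VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64Features int64Features;
  //   VkPhysicalDeviceFeatures2 features2{};
  //   features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
  //   features2.pNext = static_cast<VkPhysicalDeviceShaderAtomicInt64Features *>( int64Features );  // pointer conversion operator
  //   vkGetPhysicalDeviceFeatures2( physicalDevice, &features2 );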
// wrapper struct for struct VkPhysicalDeviceShaderBfloat16FeaturesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderBfloat16FeaturesKHR.html
struct PhysicalDeviceShaderBfloat16FeaturesKHR
{
using NativeType = VkPhysicalDeviceShaderBfloat16FeaturesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderBfloat16FeaturesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderBfloat16FeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 shaderBFloat16Type_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderBFloat16DotProduct_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderBFloat16CooperativeMatrix_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, shaderBFloat16Type{ shaderBFloat16Type_ }, shaderBFloat16DotProduct{ shaderBFloat16DotProduct_ }, shaderBFloat16CooperativeMatrix{ shaderBFloat16CooperativeMatrix_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderBfloat16FeaturesKHR( PhysicalDeviceShaderBfloat16FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderBfloat16FeaturesKHR( VkPhysicalDeviceShaderBfloat16FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceShaderBfloat16FeaturesKHR( *reinterpret_cast<PhysicalDeviceShaderBfloat16FeaturesKHR const *>( &rhs ) )
{}
PhysicalDeviceShaderBfloat16FeaturesKHR & operator=( PhysicalDeviceShaderBfloat16FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceShaderBfloat16FeaturesKHR & operator=( VkPhysicalDeviceShaderBfloat16FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderBfloat16FeaturesKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderBfloat16FeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderBfloat16FeaturesKHR & setShaderBFloat16Type( VULKAN_HPP_NAMESPACE::Bool32 shaderBFloat16Type_ ) VULKAN_HPP_NOEXCEPT
{
shaderBFloat16Type = shaderBFloat16Type_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderBfloat16FeaturesKHR & setShaderBFloat16DotProduct( VULKAN_HPP_NAMESPACE::Bool32 shaderBFloat16DotProduct_ ) VULKAN_HPP_NOEXCEPT
{
shaderBFloat16DotProduct = shaderBFloat16DotProduct_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderBfloat16FeaturesKHR & setShaderBFloat16CooperativeMatrix( VULKAN_HPP_NAMESPACE::Bool32 shaderBFloat16CooperativeMatrix_ ) VULKAN_HPP_NOEXCEPT
{
shaderBFloat16CooperativeMatrix = shaderBFloat16CooperativeMatrix_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceShaderBfloat16FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderBfloat16FeaturesKHR*>( this );
}
operator VkPhysicalDeviceShaderBfloat16FeaturesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderBfloat16FeaturesKHR*>( this );
}
operator VkPhysicalDeviceShaderBfloat16FeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceShaderBfloat16FeaturesKHR*>( this );
}
operator VkPhysicalDeviceShaderBfloat16FeaturesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceShaderBfloat16FeaturesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, shaderBFloat16Type, shaderBFloat16DotProduct, shaderBFloat16CooperativeMatrix );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceShaderBfloat16FeaturesKHR const & ) const = default;
#else
bool operator==( PhysicalDeviceShaderBfloat16FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( shaderBFloat16Type == rhs.shaderBFloat16Type )
&& ( shaderBFloat16DotProduct == rhs.shaderBFloat16DotProduct )
&& ( shaderBFloat16CooperativeMatrix == rhs.shaderBFloat16CooperativeMatrix );
#endif
}
bool operator!=( PhysicalDeviceShaderBfloat16FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderBfloat16FeaturesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderBFloat16Type = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderBFloat16DotProduct = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderBFloat16CooperativeMatrix = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderBfloat16FeaturesKHR>
{
using Type = PhysicalDeviceShaderBfloat16FeaturesKHR;
};
// wrapper struct for struct VkPhysicalDeviceShaderClockFeaturesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderClockFeaturesKHR.html
struct PhysicalDeviceShaderClockFeaturesKHR
{
using NativeType = VkPhysicalDeviceShaderClockFeaturesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderClockFeaturesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderClockFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, shaderSubgroupClock{ shaderSubgroupClock_ }, shaderDeviceClock{ shaderDeviceClock_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderClockFeaturesKHR( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderClockFeaturesKHR( VkPhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceShaderClockFeaturesKHR( *reinterpret_cast<PhysicalDeviceShaderClockFeaturesKHR const *>( &rhs ) )
{}
PhysicalDeviceShaderClockFeaturesKHR & operator=( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceShaderClockFeaturesKHR & operator=( VkPhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderClockFeaturesKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderClockFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderClockFeaturesKHR & setShaderSubgroupClock( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock_ ) VULKAN_HPP_NOEXCEPT
{
shaderSubgroupClock = shaderSubgroupClock_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderClockFeaturesKHR & setShaderDeviceClock( VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock_ ) VULKAN_HPP_NOEXCEPT
{
shaderDeviceClock = shaderDeviceClock_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceShaderClockFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderClockFeaturesKHR*>( this );
}
operator VkPhysicalDeviceShaderClockFeaturesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderClockFeaturesKHR*>( this );
}
operator VkPhysicalDeviceShaderClockFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceShaderClockFeaturesKHR*>( this );
}
operator VkPhysicalDeviceShaderClockFeaturesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceShaderClockFeaturesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, shaderSubgroupClock, shaderDeviceClock );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceShaderClockFeaturesKHR const & ) const = default;
#else
bool operator==( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( shaderSubgroupClock == rhs.shaderSubgroupClock )
&& ( shaderDeviceClock == rhs.shaderDeviceClock );
#endif
}
bool operator!=( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderClockFeaturesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderClockFeaturesKHR>
{
using Type = PhysicalDeviceShaderClockFeaturesKHR;
};
// wrapper struct for struct VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM.html
struct PhysicalDeviceShaderCoreBuiltinsFeaturesARM
{
using NativeType = VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderCoreBuiltinsFeaturesARM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreBuiltinsFeaturesARM(VULKAN_HPP_NAMESPACE::Bool32 shaderCoreBuiltins_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, shaderCoreBuiltins{ shaderCoreBuiltins_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreBuiltinsFeaturesARM( PhysicalDeviceShaderCoreBuiltinsFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderCoreBuiltinsFeaturesARM( VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceShaderCoreBuiltinsFeaturesARM( *reinterpret_cast<PhysicalDeviceShaderCoreBuiltinsFeaturesARM const *>( &rhs ) )
{}
PhysicalDeviceShaderCoreBuiltinsFeaturesARM & operator=( PhysicalDeviceShaderCoreBuiltinsFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceShaderCoreBuiltinsFeaturesARM & operator=( VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreBuiltinsFeaturesARM const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderCoreBuiltinsFeaturesARM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderCoreBuiltinsFeaturesARM & setShaderCoreBuiltins( VULKAN_HPP_NAMESPACE::Bool32 shaderCoreBuiltins_ ) VULKAN_HPP_NOEXCEPT
{
shaderCoreBuiltins = shaderCoreBuiltins_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM*>( this );
}
operator VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM*>( this );
}
operator VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM*>( this );
}
operator VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, shaderCoreBuiltins );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceShaderCoreBuiltinsFeaturesARM const & ) const = default;
#else
bool operator==( PhysicalDeviceShaderCoreBuiltinsFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( shaderCoreBuiltins == rhs.shaderCoreBuiltins );
#endif
}
bool operator!=( PhysicalDeviceShaderCoreBuiltinsFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCoreBuiltinsFeaturesARM;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderCoreBuiltins = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderCoreBuiltinsFeaturesARM>
{
using Type = PhysicalDeviceShaderCoreBuiltinsFeaturesARM;
};
// wrapper struct for struct VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM.html
struct PhysicalDeviceShaderCoreBuiltinsPropertiesARM
{
using NativeType = VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderCoreBuiltinsPropertiesARM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreBuiltinsPropertiesARM(uint64_t shaderCoreMask_ = {}, uint32_t shaderCoreCount_ = {}, uint32_t shaderWarpsPerCore_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, shaderCoreMask{ shaderCoreMask_ }, shaderCoreCount{ shaderCoreCount_ }, shaderWarpsPerCore{ shaderWarpsPerCore_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreBuiltinsPropertiesARM( PhysicalDeviceShaderCoreBuiltinsPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderCoreBuiltinsPropertiesARM( VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceShaderCoreBuiltinsPropertiesARM( *reinterpret_cast<PhysicalDeviceShaderCoreBuiltinsPropertiesARM const *>( &rhs ) )
{}
PhysicalDeviceShaderCoreBuiltinsPropertiesARM & operator=( PhysicalDeviceShaderCoreBuiltinsPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceShaderCoreBuiltinsPropertiesARM & operator=( VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreBuiltinsPropertiesARM const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM*>( this );
}
operator VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM*>( this );
}
operator VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM*>( this );
}
operator VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint64_t const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, shaderCoreMask, shaderCoreCount, shaderWarpsPerCore );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceShaderCoreBuiltinsPropertiesARM const & ) const = default;
#else
bool operator==( PhysicalDeviceShaderCoreBuiltinsPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( shaderCoreMask == rhs.shaderCoreMask )
&& ( shaderCoreCount == rhs.shaderCoreCount )
&& ( shaderWarpsPerCore == rhs.shaderWarpsPerCore );
#endif
}
bool operator!=( PhysicalDeviceShaderCoreBuiltinsPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCoreBuiltinsPropertiesARM;
void * pNext = {};
uint64_t shaderCoreMask = {};
uint32_t shaderCoreCount = {};
uint32_t shaderWarpsPerCore = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderCoreBuiltinsPropertiesARM>
{
using Type = PhysicalDeviceShaderCoreBuiltinsPropertiesARM;
};
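// Usage sketch (illustrative): property structs such as the one above are pure output
// parameters — note they carry no setters — and are filled in by the driver when chained
// into a PhysicalDeviceProperties2 query. Assumes a valid PhysicalDevice named physicalDevice.
//
//   VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreBuiltinsPropertiesARM coreBuiltins;
//   VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties2;
//   properties2.pNext = &coreBuiltins;
//   physicalDevice.getProperties2( &properties2 );
//   uint32_t warpsPerCore = coreBuiltins.shaderWarpsPerCore;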
// wrapper struct for struct VkPhysicalDeviceShaderCoreProperties2AMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderCoreProperties2AMD.html
struct PhysicalDeviceShaderCoreProperties2AMD
{
using NativeType = VkPhysicalDeviceShaderCoreProperties2AMD;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderCoreProperties2AMD;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreProperties2AMD(VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD shaderCoreFeatures_ = {}, uint32_t activeComputeUnitCount_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, shaderCoreFeatures{ shaderCoreFeatures_ }, activeComputeUnitCount{ activeComputeUnitCount_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreProperties2AMD( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderCoreProperties2AMD( VkPhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceShaderCoreProperties2AMD( *reinterpret_cast<PhysicalDeviceShaderCoreProperties2AMD const *>( &rhs ) )
{}
PhysicalDeviceShaderCoreProperties2AMD & operator=( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceShaderCoreProperties2AMD & operator=( VkPhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceShaderCoreProperties2AMD const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderCoreProperties2AMD*>( this );
}
operator VkPhysicalDeviceShaderCoreProperties2AMD &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderCoreProperties2AMD*>( this );
}
operator VkPhysicalDeviceShaderCoreProperties2AMD const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceShaderCoreProperties2AMD*>( this );
}
operator VkPhysicalDeviceShaderCoreProperties2AMD *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceShaderCoreProperties2AMD*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, shaderCoreFeatures, activeComputeUnitCount );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceShaderCoreProperties2AMD const & ) const = default;
#else
bool operator==( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( shaderCoreFeatures == rhs.shaderCoreFeatures )
&& ( activeComputeUnitCount == rhs.activeComputeUnitCount );
#endif
}
bool operator!=( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCoreProperties2AMD;
void * pNext = {};
VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD shaderCoreFeatures = {};
uint32_t activeComputeUnitCount = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderCoreProperties2AMD>
{
using Type = PhysicalDeviceShaderCoreProperties2AMD;
};
// wrapper struct for struct VkPhysicalDeviceShaderCorePropertiesAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderCorePropertiesAMD.html
struct PhysicalDeviceShaderCorePropertiesAMD
{
using NativeType = VkPhysicalDeviceShaderCorePropertiesAMD;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderCorePropertiesAMD;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCorePropertiesAMD(uint32_t shaderEngineCount_ = {}, uint32_t shaderArraysPerEngineCount_ = {}, uint32_t computeUnitsPerShaderArray_ = {}, uint32_t simdPerComputeUnit_ = {}, uint32_t wavefrontsPerSimd_ = {}, uint32_t wavefrontSize_ = {}, uint32_t sgprsPerSimd_ = {}, uint32_t minSgprAllocation_ = {}, uint32_t maxSgprAllocation_ = {}, uint32_t sgprAllocationGranularity_ = {}, uint32_t vgprsPerSimd_ = {}, uint32_t minVgprAllocation_ = {}, uint32_t maxVgprAllocation_ = {}, uint32_t vgprAllocationGranularity_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, shaderEngineCount{ shaderEngineCount_ }, shaderArraysPerEngineCount{ shaderArraysPerEngineCount_ }, computeUnitsPerShaderArray{ computeUnitsPerShaderArray_ }, simdPerComputeUnit{ simdPerComputeUnit_ }, wavefrontsPerSimd{ wavefrontsPerSimd_ }, wavefrontSize{ wavefrontSize_ }, sgprsPerSimd{ sgprsPerSimd_ }, minSgprAllocation{ minSgprAllocation_ }, maxSgprAllocation{ maxSgprAllocation_ }, sgprAllocationGranularity{ sgprAllocationGranularity_ }, vgprsPerSimd{ vgprsPerSimd_ }, minVgprAllocation{ minVgprAllocation_ }, maxVgprAllocation{ maxVgprAllocation_ }, vgprAllocationGranularity{ vgprAllocationGranularity_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCorePropertiesAMD( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderCorePropertiesAMD( VkPhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceShaderCorePropertiesAMD( *reinterpret_cast<PhysicalDeviceShaderCorePropertiesAMD const *>( &rhs ) )
{}
PhysicalDeviceShaderCorePropertiesAMD & operator=( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceShaderCorePropertiesAMD & operator=( VkPhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceShaderCorePropertiesAMD const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderCorePropertiesAMD*>( this );
}
operator VkPhysicalDeviceShaderCorePropertiesAMD &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderCorePropertiesAMD*>( this );
}
operator VkPhysicalDeviceShaderCorePropertiesAMD const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceShaderCorePropertiesAMD*>( this );
}
operator VkPhysicalDeviceShaderCorePropertiesAMD *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceShaderCorePropertiesAMD*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, shaderEngineCount, shaderArraysPerEngineCount, computeUnitsPerShaderArray, simdPerComputeUnit, wavefrontsPerSimd, wavefrontSize, sgprsPerSimd, minSgprAllocation, maxSgprAllocation, sgprAllocationGranularity, vgprsPerSimd, minVgprAllocation, maxVgprAllocation, vgprAllocationGranularity );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceShaderCorePropertiesAMD const & ) const = default;
#else
bool operator==( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( shaderEngineCount == rhs.shaderEngineCount )
&& ( shaderArraysPerEngineCount == rhs.shaderArraysPerEngineCount )
&& ( computeUnitsPerShaderArray == rhs.computeUnitsPerShaderArray )
&& ( simdPerComputeUnit == rhs.simdPerComputeUnit )
&& ( wavefrontsPerSimd == rhs.wavefrontsPerSimd )
&& ( wavefrontSize == rhs.wavefrontSize )
&& ( sgprsPerSimd == rhs.sgprsPerSimd )
&& ( minSgprAllocation == rhs.minSgprAllocation )
&& ( maxSgprAllocation == rhs.maxSgprAllocation )
&& ( sgprAllocationGranularity == rhs.sgprAllocationGranularity )
&& ( vgprsPerSimd == rhs.vgprsPerSimd )
&& ( minVgprAllocation == rhs.minVgprAllocation )
&& ( maxVgprAllocation == rhs.maxVgprAllocation )
&& ( vgprAllocationGranularity == rhs.vgprAllocationGranularity );
#endif
}
bool operator!=( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCorePropertiesAMD;
void * pNext = {};
uint32_t shaderEngineCount = {};
uint32_t shaderArraysPerEngineCount = {};
uint32_t computeUnitsPerShaderArray = {};
uint32_t simdPerComputeUnit = {};
uint32_t wavefrontsPerSimd = {};
uint32_t wavefrontSize = {};
uint32_t sgprsPerSimd = {};
uint32_t minSgprAllocation = {};
uint32_t maxSgprAllocation = {};
uint32_t sgprAllocationGranularity = {};
uint32_t vgprsPerSimd = {};
uint32_t minVgprAllocation = {};
uint32_t maxVgprAllocation = {};
uint32_t vgprAllocationGranularity = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderCorePropertiesAMD>
{
using Type = PhysicalDeviceShaderCorePropertiesAMD;
};
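// Usage sketch (illustrative): the AMD shader core counts above compose multiplicatively,
// so a rough device-wide occupancy bound can be derived from a filled-in struct (named
// props here, assumed to have been obtained via a PhysicalDeviceProperties2 chain):
//
//   uint32_t simdCount      = props.shaderEngineCount * props.shaderArraysPerEngineCount
//                           * props.computeUnitsPerShaderArray * props.simdPerComputeUnit;
//   uint32_t maxWavefronts  = simdCount * props.wavefrontsPerSimd;
//   uint32_t maxInvocations = maxWavefronts * props.wavefrontSize;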
// wrapper struct for struct VkPhysicalDeviceShaderCorePropertiesARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderCorePropertiesARM.html
struct PhysicalDeviceShaderCorePropertiesARM
{
using NativeType = VkPhysicalDeviceShaderCorePropertiesARM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderCorePropertiesARM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCorePropertiesARM(uint32_t pixelRate_ = {}, uint32_t texelRate_ = {}, uint32_t fmaRate_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pixelRate{ pixelRate_ }, texelRate{ texelRate_ }, fmaRate{ fmaRate_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCorePropertiesARM( PhysicalDeviceShaderCorePropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderCorePropertiesARM( VkPhysicalDeviceShaderCorePropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceShaderCorePropertiesARM( *reinterpret_cast<PhysicalDeviceShaderCorePropertiesARM const *>( &rhs ) )
{}
PhysicalDeviceShaderCorePropertiesARM & operator=( PhysicalDeviceShaderCorePropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceShaderCorePropertiesARM & operator=( VkPhysicalDeviceShaderCorePropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesARM const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceShaderCorePropertiesARM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderCorePropertiesARM*>( this );
}
operator VkPhysicalDeviceShaderCorePropertiesARM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderCorePropertiesARM*>( this );
}
operator VkPhysicalDeviceShaderCorePropertiesARM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceShaderCorePropertiesARM*>( this );
}
operator VkPhysicalDeviceShaderCorePropertiesARM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceShaderCorePropertiesARM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pixelRate, texelRate, fmaRate );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceShaderCorePropertiesARM const & ) const = default;
#else
bool operator==( PhysicalDeviceShaderCorePropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pixelRate == rhs.pixelRate )
&& ( texelRate == rhs.texelRate )
&& ( fmaRate == rhs.fmaRate );
#endif
}
bool operator!=( PhysicalDeviceShaderCorePropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCorePropertiesARM;
void * pNext = {};
uint32_t pixelRate = {};
uint32_t texelRate = {};
uint32_t fmaRate = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderCorePropertiesARM>
{
using Type = PhysicalDeviceShaderCorePropertiesARM;
};
// wrapper struct for struct VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures.html
struct PhysicalDeviceShaderDemoteToHelperInvocationFeatures
{
using NativeType = VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeatures;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDemoteToHelperInvocationFeatures(VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, shaderDemoteToHelperInvocation{ shaderDemoteToHelperInvocation_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDemoteToHelperInvocationFeatures( PhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderDemoteToHelperInvocationFeatures( VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceShaderDemoteToHelperInvocationFeatures( *reinterpret_cast<PhysicalDeviceShaderDemoteToHelperInvocationFeatures const *>( &rhs ) )
{}
PhysicalDeviceShaderDemoteToHelperInvocationFeatures & operator=( PhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceShaderDemoteToHelperInvocationFeatures & operator=( VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeatures const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderDemoteToHelperInvocationFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderDemoteToHelperInvocationFeatures & setShaderDemoteToHelperInvocation( VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ ) VULKAN_HPP_NOEXCEPT
{
shaderDemoteToHelperInvocation = shaderDemoteToHelperInvocation_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures*>( this );
}
operator VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures*>( this );
}
operator VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures*>( this );
}
operator VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, shaderDemoteToHelperInvocation );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceShaderDemoteToHelperInvocationFeatures const & ) const = default;
#else
bool operator==( PhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( shaderDemoteToHelperInvocation == rhs.shaderDemoteToHelperInvocation );
#endif
}
bool operator!=( PhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeatures;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeatures>
{
using Type = PhysicalDeviceShaderDemoteToHelperInvocationFeatures;
};
using PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT = PhysicalDeviceShaderDemoteToHelperInvocationFeatures;
// wrapper struct for struct VkPhysicalDeviceShaderDrawParametersFeatures, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderDrawParametersFeatures.html
struct PhysicalDeviceShaderDrawParametersFeatures
{
using NativeType = VkPhysicalDeviceShaderDrawParametersFeatures;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderDrawParametersFeatures;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDrawParametersFeatures(VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, shaderDrawParameters{ shaderDrawParameters_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDrawParametersFeatures( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderDrawParametersFeatures( VkPhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceShaderDrawParametersFeatures( *reinterpret_cast<PhysicalDeviceShaderDrawParametersFeatures const *>( &rhs ) )
{}
PhysicalDeviceShaderDrawParametersFeatures & operator=( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceShaderDrawParametersFeatures & operator=( VkPhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderDrawParametersFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderDrawParametersFeatures & setShaderDrawParameters( VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ ) VULKAN_HPP_NOEXCEPT
{
shaderDrawParameters = shaderDrawParameters_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceShaderDrawParametersFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderDrawParametersFeatures*>( this );
}
operator VkPhysicalDeviceShaderDrawParametersFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderDrawParametersFeatures*>( this );
}
operator VkPhysicalDeviceShaderDrawParametersFeatures const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceShaderDrawParametersFeatures*>( this );
}
operator VkPhysicalDeviceShaderDrawParametersFeatures *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceShaderDrawParametersFeatures*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, shaderDrawParameters );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceShaderDrawParametersFeatures const & ) const = default;
#else
bool operator==( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( shaderDrawParameters == rhs.shaderDrawParameters );
#endif
}
bool operator!=( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderDrawParametersFeatures;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderDrawParametersFeatures>
{
using Type = PhysicalDeviceShaderDrawParametersFeatures;
};
using PhysicalDeviceShaderDrawParameterFeatures = PhysicalDeviceShaderDrawParametersFeatures;
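// Usage sketch (illustrative): in enhanced mode, a StructureChain can query several feature
// structs in one call — the chain wires up pNext automatically and is checked at compile
// time. Assumes a valid PhysicalDevice named physicalDevice.
//
//   auto chain = physicalDevice.getFeatures2<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2,
//                                            VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures>();
//   bool drawParams = ( chain.get<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures>()
//                         .shaderDrawParameters == VK_TRUE );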
// wrapper struct for struct VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD.html
struct PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD
{
using NativeType = VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD(VULKAN_HPP_NAMESPACE::Bool32 shaderEarlyAndLateFragmentTests_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, shaderEarlyAndLateFragmentTests{ shaderEarlyAndLateFragmentTests_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD( PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD( VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD( *reinterpret_cast<PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const *>( &rhs ) )
{}
PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD & operator=( PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD & operator=( VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD & setShaderEarlyAndLateFragmentTests( VULKAN_HPP_NAMESPACE::Bool32 shaderEarlyAndLateFragmentTests_ ) VULKAN_HPP_NOEXCEPT
{
shaderEarlyAndLateFragmentTests = shaderEarlyAndLateFragmentTests_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD*>( this );
}
operator VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD*>( this );
}
operator VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD*>( this );
}
operator VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, shaderEarlyAndLateFragmentTests );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const & ) const = default;
#else
bool operator==( PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( shaderEarlyAndLateFragmentTests == rhs.shaderEarlyAndLateFragmentTests );
#endif
}
bool operator!=( PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderEarlyAndLateFragmentTests = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD>
{
using Type = PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD;
};
#if defined( VK_ENABLE_BETA_EXTENSIONS )
// wrapper struct for struct VkPhysicalDeviceShaderEnqueueFeaturesAMDX, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderEnqueueFeaturesAMDX.html
struct PhysicalDeviceShaderEnqueueFeaturesAMDX
{
using NativeType = VkPhysicalDeviceShaderEnqueueFeaturesAMDX;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderEnqueueFeaturesAMDX;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderEnqueueFeaturesAMDX(VULKAN_HPP_NAMESPACE::Bool32 shaderEnqueue_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderMeshEnqueue_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, shaderEnqueue{ shaderEnqueue_ }, shaderMeshEnqueue{ shaderMeshEnqueue_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderEnqueueFeaturesAMDX( PhysicalDeviceShaderEnqueueFeaturesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderEnqueueFeaturesAMDX( VkPhysicalDeviceShaderEnqueueFeaturesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceShaderEnqueueFeaturesAMDX( *reinterpret_cast<PhysicalDeviceShaderEnqueueFeaturesAMDX const *>( &rhs ) )
{}
PhysicalDeviceShaderEnqueueFeaturesAMDX & operator=( PhysicalDeviceShaderEnqueueFeaturesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceShaderEnqueueFeaturesAMDX & operator=( VkPhysicalDeviceShaderEnqueueFeaturesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueueFeaturesAMDX const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueueFeaturesAMDX & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueueFeaturesAMDX & setShaderEnqueue( VULKAN_HPP_NAMESPACE::Bool32 shaderEnqueue_ ) VULKAN_HPP_NOEXCEPT
{
shaderEnqueue = shaderEnqueue_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueueFeaturesAMDX & setShaderMeshEnqueue( VULKAN_HPP_NAMESPACE::Bool32 shaderMeshEnqueue_ ) VULKAN_HPP_NOEXCEPT
{
shaderMeshEnqueue = shaderMeshEnqueue_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceShaderEnqueueFeaturesAMDX const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderEnqueueFeaturesAMDX*>( this );
}
operator VkPhysicalDeviceShaderEnqueueFeaturesAMDX &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderEnqueueFeaturesAMDX*>( this );
}
operator VkPhysicalDeviceShaderEnqueueFeaturesAMDX const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceShaderEnqueueFeaturesAMDX*>( this );
}
operator VkPhysicalDeviceShaderEnqueueFeaturesAMDX *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceShaderEnqueueFeaturesAMDX*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, shaderEnqueue, shaderMeshEnqueue );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceShaderEnqueueFeaturesAMDX const & ) const = default;
#else
bool operator==( PhysicalDeviceShaderEnqueueFeaturesAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( shaderEnqueue == rhs.shaderEnqueue )
&& ( shaderMeshEnqueue == rhs.shaderMeshEnqueue );
#endif
}
bool operator!=( PhysicalDeviceShaderEnqueueFeaturesAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderEnqueueFeaturesAMDX;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderEnqueue = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderMeshEnqueue = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderEnqueueFeaturesAMDX>
{
using Type = PhysicalDeviceShaderEnqueueFeaturesAMDX;
};
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
// wrapper struct for struct VkPhysicalDeviceShaderEnqueuePropertiesAMDX, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderEnqueuePropertiesAMDX.html
struct PhysicalDeviceShaderEnqueuePropertiesAMDX
{
using NativeType = VkPhysicalDeviceShaderEnqueuePropertiesAMDX;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderEnqueuePropertiesAMDX;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueuePropertiesAMDX(uint32_t maxExecutionGraphDepth_ = {}, uint32_t maxExecutionGraphShaderOutputNodes_ = {}, uint32_t maxExecutionGraphShaderPayloadSize_ = {}, uint32_t maxExecutionGraphShaderPayloadCount_ = {}, uint32_t executionGraphDispatchAddressAlignment_ = {}, std::array<uint32_t,3> const & maxExecutionGraphWorkgroupCount_ = {}, uint32_t maxExecutionGraphWorkgroups_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, maxExecutionGraphDepth{ maxExecutionGraphDepth_ }, maxExecutionGraphShaderOutputNodes{ maxExecutionGraphShaderOutputNodes_ }, maxExecutionGraphShaderPayloadSize{ maxExecutionGraphShaderPayloadSize_ }, maxExecutionGraphShaderPayloadCount{ maxExecutionGraphShaderPayloadCount_ }, executionGraphDispatchAddressAlignment{ executionGraphDispatchAddressAlignment_ }, maxExecutionGraphWorkgroupCount{ maxExecutionGraphWorkgroupCount_ }, maxExecutionGraphWorkgroups{ maxExecutionGraphWorkgroups_ }
{}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueuePropertiesAMDX( PhysicalDeviceShaderEnqueuePropertiesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderEnqueuePropertiesAMDX( VkPhysicalDeviceShaderEnqueuePropertiesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceShaderEnqueuePropertiesAMDX( *reinterpret_cast<PhysicalDeviceShaderEnqueuePropertiesAMDX const *>( &rhs ) )
{}
PhysicalDeviceShaderEnqueuePropertiesAMDX & operator=( PhysicalDeviceShaderEnqueuePropertiesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceShaderEnqueuePropertiesAMDX & operator=( VkPhysicalDeviceShaderEnqueuePropertiesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueuePropertiesAMDX const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceShaderEnqueuePropertiesAMDX const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderEnqueuePropertiesAMDX*>( this );
}
operator VkPhysicalDeviceShaderEnqueuePropertiesAMDX &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderEnqueuePropertiesAMDX*>( this );
}
operator VkPhysicalDeviceShaderEnqueuePropertiesAMDX const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceShaderEnqueuePropertiesAMDX*>( this );
}
operator VkPhysicalDeviceShaderEnqueuePropertiesAMDX *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceShaderEnqueuePropertiesAMDX*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, maxExecutionGraphDepth, maxExecutionGraphShaderOutputNodes, maxExecutionGraphShaderPayloadSize, maxExecutionGraphShaderPayloadCount, executionGraphDispatchAddressAlignment, maxExecutionGraphWorkgroupCount, maxExecutionGraphWorkgroups );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceShaderEnqueuePropertiesAMDX const & ) const = default;
#else
bool operator==( PhysicalDeviceShaderEnqueuePropertiesAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( maxExecutionGraphDepth == rhs.maxExecutionGraphDepth )
&& ( maxExecutionGraphShaderOutputNodes == rhs.maxExecutionGraphShaderOutputNodes )
&& ( maxExecutionGraphShaderPayloadSize == rhs.maxExecutionGraphShaderPayloadSize )
&& ( maxExecutionGraphShaderPayloadCount == rhs.maxExecutionGraphShaderPayloadCount )
&& ( executionGraphDispatchAddressAlignment == rhs.executionGraphDispatchAddressAlignment )
&& ( maxExecutionGraphWorkgroupCount == rhs.maxExecutionGraphWorkgroupCount )
&& ( maxExecutionGraphWorkgroups == rhs.maxExecutionGraphWorkgroups );
#endif
}
bool operator!=( PhysicalDeviceShaderEnqueuePropertiesAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderEnqueuePropertiesAMDX;
void * pNext = {};
uint32_t maxExecutionGraphDepth = {};
uint32_t maxExecutionGraphShaderOutputNodes = {};
uint32_t maxExecutionGraphShaderPayloadSize = {};
uint32_t maxExecutionGraphShaderPayloadCount = {};
uint32_t executionGraphDispatchAddressAlignment = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> maxExecutionGraphWorkgroupCount = {};
uint32_t maxExecutionGraphWorkgroups = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderEnqueuePropertiesAMDX>
{
using Type = PhysicalDeviceShaderEnqueuePropertiesAMDX;
};
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
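// Note (illustrative): the AMDX shader enqueue structs above are provisional, so application
// code that touches them must sit behind the same guard that gates them here:
//
//   #if defined( VK_ENABLE_BETA_EXTENSIONS )
//     VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueueFeaturesAMDX enqueueFeatures;
//     features2.pNext = &enqueueFeatures;  // assumes a PhysicalDeviceFeatures2 named features2
//   #endif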
// wrapper struct for struct VkPhysicalDeviceShaderExpectAssumeFeatures, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderExpectAssumeFeatures.html
struct PhysicalDeviceShaderExpectAssumeFeatures
{
using NativeType = VkPhysicalDeviceShaderExpectAssumeFeatures;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderExpectAssumeFeatures;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderExpectAssumeFeatures(VULKAN_HPP_NAMESPACE::Bool32 shaderExpectAssume_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, shaderExpectAssume{ shaderExpectAssume_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderExpectAssumeFeatures( PhysicalDeviceShaderExpectAssumeFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderExpectAssumeFeatures( VkPhysicalDeviceShaderExpectAssumeFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceShaderExpectAssumeFeatures( *reinterpret_cast<PhysicalDeviceShaderExpectAssumeFeatures const *>( &rhs ) )
{}
PhysicalDeviceShaderExpectAssumeFeatures & operator=( PhysicalDeviceShaderExpectAssumeFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceShaderExpectAssumeFeatures & operator=( VkPhysicalDeviceShaderExpectAssumeFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderExpectAssumeFeatures const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderExpectAssumeFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderExpectAssumeFeatures & setShaderExpectAssume( VULKAN_HPP_NAMESPACE::Bool32 shaderExpectAssume_ ) VULKAN_HPP_NOEXCEPT
{
shaderExpectAssume = shaderExpectAssume_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceShaderExpectAssumeFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderExpectAssumeFeatures*>( this );
}
operator VkPhysicalDeviceShaderExpectAssumeFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderExpectAssumeFeatures*>( this );
}
operator VkPhysicalDeviceShaderExpectAssumeFeatures const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceShaderExpectAssumeFeatures*>( this );
}
operator VkPhysicalDeviceShaderExpectAssumeFeatures *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceShaderExpectAssumeFeatures*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, shaderExpectAssume );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceShaderExpectAssumeFeatures const & ) const = default;
#else
bool operator==( PhysicalDeviceShaderExpectAssumeFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( shaderExpectAssume == rhs.shaderExpectAssume );
#endif
}
bool operator!=( PhysicalDeviceShaderExpectAssumeFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderExpectAssumeFeatures;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderExpectAssume = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderExpectAssumeFeatures>
{
using Type = PhysicalDeviceShaderExpectAssumeFeatures;
};
using PhysicalDeviceShaderExpectAssumeFeaturesKHR = PhysicalDeviceShaderExpectAssumeFeatures;
// wrapper struct for struct VkPhysicalDeviceShaderFloat16Int8Features, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderFloat16Int8Features.html
struct PhysicalDeviceShaderFloat16Int8Features
{
using NativeType = VkPhysicalDeviceShaderFloat16Int8Features;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderFloat16Int8Features;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloat16Int8Features(VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, shaderFloat16{ shaderFloat16_ }, shaderInt8{ shaderInt8_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloat16Int8Features( PhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderFloat16Int8Features( VkPhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceShaderFloat16Int8Features( *reinterpret_cast<PhysicalDeviceShaderFloat16Int8Features const *>( &rhs ) )
{}
PhysicalDeviceShaderFloat16Int8Features & operator=( PhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceShaderFloat16Int8Features & operator=( VkPhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8Features const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFloat16Int8Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFloat16Int8Features & setShaderFloat16( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ ) VULKAN_HPP_NOEXCEPT
{
shaderFloat16 = shaderFloat16_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFloat16Int8Features & setShaderInt8( VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ ) VULKAN_HPP_NOEXCEPT
{
shaderInt8 = shaderInt8_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceShaderFloat16Int8Features const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderFloat16Int8Features*>( this );
}
operator VkPhysicalDeviceShaderFloat16Int8Features &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderFloat16Int8Features*>( this );
}
operator VkPhysicalDeviceShaderFloat16Int8Features const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceShaderFloat16Int8Features*>( this );
}
operator VkPhysicalDeviceShaderFloat16Int8Features *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceShaderFloat16Int8Features*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, shaderFloat16, shaderInt8 );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceShaderFloat16Int8Features const & ) const = default;
#else
bool operator==( PhysicalDeviceShaderFloat16Int8Features const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( shaderFloat16 == rhs.shaderFloat16 )
&& ( shaderInt8 == rhs.shaderInt8 );
#endif
}
bool operator!=( PhysicalDeviceShaderFloat16Int8Features const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderFloat16Int8Features;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16 = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderInt8 = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderFloat16Int8Features>
{
using Type = PhysicalDeviceShaderFloat16Int8Features;
};
using PhysicalDeviceFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8Features;
using PhysicalDeviceShaderFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8Features;
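// Usage sketch (illustrative): feature structs serve as inputs as well as outputs — chaining
// one into DeviceCreateInfo::pNext enables the requested features at device creation. Assumes
// a filled-in DeviceQueueCreateInfo named queueCreateInfo and a valid PhysicalDevice named
// physicalDevice, with exceptions enabled so createDevice returns the Device directly.
//
//   auto float16Int8 = VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8Features{}
//                        .setShaderFloat16( VK_TRUE )
//                        .setShaderInt8( VK_TRUE );
//   VULKAN_HPP_NAMESPACE::DeviceCreateInfo createInfo;
//   createInfo.setQueueCreateInfoCount( 1 ).setPQueueCreateInfos( &queueCreateInfo );
//   createInfo.pNext = &float16Int8;
//   VULKAN_HPP_NAMESPACE::Device device = physicalDevice.createDevice( createInfo );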
// wrapper struct for struct VkPhysicalDeviceShaderFloat8FeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderFloat8FeaturesEXT.html
struct PhysicalDeviceShaderFloat8FeaturesEXT
{
using NativeType = VkPhysicalDeviceShaderFloat8FeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderFloat8FeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloat8FeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 shaderFloat8_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderFloat8CooperativeMatrix_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, shaderFloat8{ shaderFloat8_ }, shaderFloat8CooperativeMatrix{ shaderFloat8CooperativeMatrix_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloat8FeaturesEXT( PhysicalDeviceShaderFloat8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderFloat8FeaturesEXT( VkPhysicalDeviceShaderFloat8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceShaderFloat8FeaturesEXT( *reinterpret_cast<PhysicalDeviceShaderFloat8FeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceShaderFloat8FeaturesEXT & operator=( PhysicalDeviceShaderFloat8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceShaderFloat8FeaturesEXT & operator=( VkPhysicalDeviceShaderFloat8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat8FeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFloat8FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFloat8FeaturesEXT & setShaderFloat8( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat8_ ) VULKAN_HPP_NOEXCEPT
{
shaderFloat8 = shaderFloat8_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFloat8FeaturesEXT & setShaderFloat8CooperativeMatrix( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat8CooperativeMatrix_ ) VULKAN_HPP_NOEXCEPT
{
shaderFloat8CooperativeMatrix = shaderFloat8CooperativeMatrix_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceShaderFloat8FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderFloat8FeaturesEXT*>( this );
}
operator VkPhysicalDeviceShaderFloat8FeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderFloat8FeaturesEXT*>( this );
}
operator VkPhysicalDeviceShaderFloat8FeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceShaderFloat8FeaturesEXT*>( this );
}
operator VkPhysicalDeviceShaderFloat8FeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceShaderFloat8FeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, shaderFloat8, shaderFloat8CooperativeMatrix );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceShaderFloat8FeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceShaderFloat8FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( shaderFloat8 == rhs.shaderFloat8 )
&& ( shaderFloat8CooperativeMatrix == rhs.shaderFloat8CooperativeMatrix );
#endif
}
bool operator!=( PhysicalDeviceShaderFloat8FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderFloat8FeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderFloat8 = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderFloat8CooperativeMatrix = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderFloat8FeaturesEXT>
{
using Type = PhysicalDeviceShaderFloat8FeaturesEXT;
};
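// Usage sketch (illustrative comment, not part of the generated API): to query
// support for these features, chain the struct into PhysicalDeviceFeatures2 and
// call the Vulkan 1.1 getFeatures2. Here `physicalDevice` is an assumed,
// already-acquired vk::PhysicalDevice, and `vk` is the default
// VULKAN_HPP_NAMESPACE alias.
//
//   vk::PhysicalDeviceShaderFloat8FeaturesEXT float8Features;
//   vk::PhysicalDeviceFeatures2 features2;
//   features2.pNext = &float8Features;
//   physicalDevice.getFeatures2( &features2 );
//   bool hasFloat8 = ( float8Features.shaderFloat8 == VK_TRUE );
//   bool hasFloat8CoopMat = ( float8Features.shaderFloat8CooperativeMatrix == VK_TRUE );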
// wrapper struct for struct VkPhysicalDeviceShaderFloatControls2Features, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderFloatControls2Features.html
struct PhysicalDeviceShaderFloatControls2Features
{
using NativeType = VkPhysicalDeviceShaderFloatControls2Features;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderFloatControls2Features;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloatControls2Features(VULKAN_HPP_NAMESPACE::Bool32 shaderFloatControls2_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, shaderFloatControls2{ shaderFloatControls2_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloatControls2Features( PhysicalDeviceShaderFloatControls2Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderFloatControls2Features( VkPhysicalDeviceShaderFloatControls2Features const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceShaderFloatControls2Features( *reinterpret_cast<PhysicalDeviceShaderFloatControls2Features const *>( &rhs ) )
{}
PhysicalDeviceShaderFloatControls2Features & operator=( PhysicalDeviceShaderFloatControls2Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceShaderFloatControls2Features & operator=( VkPhysicalDeviceShaderFloatControls2Features const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloatControls2Features const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFloatControls2Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFloatControls2Features & setShaderFloatControls2( VULKAN_HPP_NAMESPACE::Bool32 shaderFloatControls2_ ) VULKAN_HPP_NOEXCEPT
{
shaderFloatControls2 = shaderFloatControls2_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceShaderFloatControls2Features const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderFloatControls2Features*>( this );
}
operator VkPhysicalDeviceShaderFloatControls2Features &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderFloatControls2Features*>( this );
}
operator VkPhysicalDeviceShaderFloatControls2Features const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceShaderFloatControls2Features*>( this );
}
operator VkPhysicalDeviceShaderFloatControls2Features *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceShaderFloatControls2Features*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, shaderFloatControls2 );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceShaderFloatControls2Features const & ) const = default;
#else
bool operator==( PhysicalDeviceShaderFloatControls2Features const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( shaderFloatControls2 == rhs.shaderFloatControls2 );
#endif
}
bool operator!=( PhysicalDeviceShaderFloatControls2Features const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderFloatControls2Features;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderFloatControls2 = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderFloatControls2Features>
{
using Type = PhysicalDeviceShaderFloatControls2Features;
};
using PhysicalDeviceShaderFloatControls2FeaturesKHR = PhysicalDeviceShaderFloatControls2Features;
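// Usage sketch (illustrative, same assumptions as the sketch above): a feature
// struct that was queried as supported can also be chained into
// DeviceCreateInfo::pNext to enable it at device creation; the fluent setters
// defined above make this concise.
//
//   auto floatControls2Features = vk::PhysicalDeviceShaderFloatControls2Features{}.setShaderFloatControls2( VK_TRUE );
//   vk::DeviceCreateInfo deviceCreateInfo;
//   deviceCreateInfo.setPNext( &floatControls2Features );
//   // ... fill in queue create infos etc., then physicalDevice.createDevice( deviceCreateInfo )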
// wrapper struct for struct VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT.html
struct PhysicalDeviceShaderImageAtomicInt64FeaturesEXT
{
using NativeType = VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageAtomicInt64FeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 shaderImageInt64Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseImageInt64Atomics_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, shaderImageInt64Atomics{ shaderImageInt64Atomics_ }, sparseImageInt64Atomics{ sparseImageInt64Atomics_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageAtomicInt64FeaturesEXT( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderImageAtomicInt64FeaturesEXT( VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceShaderImageAtomicInt64FeaturesEXT( *reinterpret_cast<PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & operator=( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & operator=( VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & setShaderImageInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderImageInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
{
shaderImageInt64Atomics = shaderImageInt64Atomics_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & setSparseImageInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 sparseImageInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
{
sparseImageInt64Atomics = sparseImageInt64Atomics_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT*>( this );
}
operator VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT*>( this );
}
operator VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT*>( this );
}
operator VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, shaderImageInt64Atomics, sparseImageInt64Atomics );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( shaderImageInt64Atomics == rhs.shaderImageInt64Atomics )
&& ( sparseImageInt64Atomics == rhs.sparseImageInt64Atomics );
#endif
}
bool operator!=( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderImageInt64Atomics = {};
VULKAN_HPP_NAMESPACE::Bool32 sparseImageInt64Atomics = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT>
{
using Type = PhysicalDeviceShaderImageAtomicInt64FeaturesEXT;
};
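// Usage sketch (illustrative): with the enhanced-mode API, a StructureChain can
// query this struct in one call instead of wiring pNext by hand; assumes the
// same `physicalDevice` as in the sketches above.
//
//   auto chain = physicalDevice.getFeatures2< vk::PhysicalDeviceFeatures2,
//                                             vk::PhysicalDeviceShaderImageAtomicInt64FeaturesEXT >();
//   auto const & atomics = chain.get< vk::PhysicalDeviceShaderImageAtomicInt64FeaturesEXT >();
//   // atomics.shaderImageInt64Atomics / atomics.sparseImageInt64Atomics are now filled in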
// wrapper struct for struct VkPhysicalDeviceShaderImageFootprintFeaturesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderImageFootprintFeaturesNV.html
struct PhysicalDeviceShaderImageFootprintFeaturesNV
{
using NativeType = VkPhysicalDeviceShaderImageFootprintFeaturesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageFootprintFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 imageFootprint_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, imageFootprint{ imageFootprint_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageFootprintFeaturesNV( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderImageFootprintFeaturesNV( VkPhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceShaderImageFootprintFeaturesNV( *reinterpret_cast<PhysicalDeviceShaderImageFootprintFeaturesNV const *>( &rhs ) )
{}
PhysicalDeviceShaderImageFootprintFeaturesNV & operator=( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceShaderImageFootprintFeaturesNV & operator=( VkPhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageFootprintFeaturesNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageFootprintFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageFootprintFeaturesNV & setImageFootprint( VULKAN_HPP_NAMESPACE::Bool32 imageFootprint_ ) VULKAN_HPP_NOEXCEPT
{
imageFootprint = imageFootprint_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceShaderImageFootprintFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderImageFootprintFeaturesNV*>( this );
}
operator VkPhysicalDeviceShaderImageFootprintFeaturesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderImageFootprintFeaturesNV*>( this );
}
operator VkPhysicalDeviceShaderImageFootprintFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceShaderImageFootprintFeaturesNV*>( this );
}
operator VkPhysicalDeviceShaderImageFootprintFeaturesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceShaderImageFootprintFeaturesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, imageFootprint );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceShaderImageFootprintFeaturesNV const & ) const = default;
#else
bool operator==( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( imageFootprint == rhs.imageFootprint );
#endif
}
bool operator!=( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 imageFootprint = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV>
{
using Type = PhysicalDeviceShaderImageFootprintFeaturesNV;
};
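// Usage sketch (illustrative): the setters above each return *this, so a fully
// initialized struct can be built in a single expression:
//
//   auto footprintFeatures = vk::PhysicalDeviceShaderImageFootprintFeaturesNV{}
//                              .setPNext( nullptr )
//                              .setImageFootprint( VK_TRUE );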
// wrapper struct for struct VkPhysicalDeviceShaderIntegerDotProductFeatures, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderIntegerDotProductFeatures.html
struct PhysicalDeviceShaderIntegerDotProductFeatures
{
using NativeType = VkPhysicalDeviceShaderIntegerDotProductFeatures;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderIntegerDotProductFeatures;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerDotProductFeatures(VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, shaderIntegerDotProduct{ shaderIntegerDotProduct_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerDotProductFeatures( PhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderIntegerDotProductFeatures( VkPhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceShaderIntegerDotProductFeatures( *reinterpret_cast<PhysicalDeviceShaderIntegerDotProductFeatures const *>( &rhs ) )
{}
PhysicalDeviceShaderIntegerDotProductFeatures & operator=( PhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceShaderIntegerDotProductFeatures & operator=( VkPhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductFeatures const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductFeatures & setShaderIntegerDotProduct( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct_ ) VULKAN_HPP_NOEXCEPT
{
shaderIntegerDotProduct = shaderIntegerDotProduct_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceShaderIntegerDotProductFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderIntegerDotProductFeatures*>( this );
}
operator VkPhysicalDeviceShaderIntegerDotProductFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderIntegerDotProductFeatures*>( this );
}
operator VkPhysicalDeviceShaderIntegerDotProductFeatures const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceShaderIntegerDotProductFeatures*>( this );
}
operator VkPhysicalDeviceShaderIntegerDotProductFeatures *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceShaderIntegerDotProductFeatures*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, shaderIntegerDotProduct );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceShaderIntegerDotProductFeatures const & ) const = default;
#else
bool operator==( PhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( shaderIntegerDotProduct == rhs.shaderIntegerDotProduct );
#endif
}
bool operator!=( PhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderIntegerDotProductFeatures;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderIntegerDotProductFeatures>
{
using Type = PhysicalDeviceShaderIntegerDotProductFeatures;
};
using PhysicalDeviceShaderIntegerDotProductFeaturesKHR = PhysicalDeviceShaderIntegerDotProductFeatures;
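// Usage sketch (illustrative): the KHR alias above means code written against
// the extension name keeps compiling after promotion to core; the query pattern
// is unchanged, assuming the same `physicalDevice` as above.
//
//   vk::PhysicalDeviceShaderIntegerDotProductFeaturesKHR dotProductFeatures;  // same type as the core struct
//   vk::PhysicalDeviceFeatures2 features2;
//   features2.pNext = &dotProductFeatures;
//   physicalDevice.getFeatures2( &features2 );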
// wrapper struct for struct VkPhysicalDeviceShaderIntegerDotProductProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderIntegerDotProductProperties.html
struct PhysicalDeviceShaderIntegerDotProductProperties
{
using NativeType = VkPhysicalDeviceShaderIntegerDotProductProperties;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderIntegerDotProductProperties;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerDotProductProperties(
  VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitUnsignedAccelerated_ = {},
  VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitSignedAccelerated_ = {},
  VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitMixedSignednessAccelerated_ = {},
  VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedUnsignedAccelerated_ = {},
  VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedSignedAccelerated_ = {},
  VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedMixedSignednessAccelerated_ = {},
  VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitUnsignedAccelerated_ = {},
  VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitSignedAccelerated_ = {},
  VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitMixedSignednessAccelerated_ = {},
  VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitUnsignedAccelerated_ = {},
  VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitSignedAccelerated_ = {},
  VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitMixedSignednessAccelerated_ = {},
  VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitUnsignedAccelerated_ = {},
  VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitSignedAccelerated_ = {},
  VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitMixedSignednessAccelerated_ = {},
  VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitUnsignedAccelerated_ = {},
  VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitSignedAccelerated_ = {},
  VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated_ = {},
  VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated_ = {},
  VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated_ = {},
  VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated_ = {},
  VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitUnsignedAccelerated_ = {},
  VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitSignedAccelerated_ = {},
  VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated_ = {},
  VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitUnsignedAccelerated_ = {},
  VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitSignedAccelerated_ = {},
  VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated_ = {},
  VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitUnsignedAccelerated_ = {},
  VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitSignedAccelerated_ = {},
  VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated_ = {},
  void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
  : pNext{ pNext_ }
  , integerDotProduct8BitUnsignedAccelerated{ integerDotProduct8BitUnsignedAccelerated_ }
  , integerDotProduct8BitSignedAccelerated{ integerDotProduct8BitSignedAccelerated_ }
  , integerDotProduct8BitMixedSignednessAccelerated{ integerDotProduct8BitMixedSignednessAccelerated_ }
  , integerDotProduct4x8BitPackedUnsignedAccelerated{ integerDotProduct4x8BitPackedUnsignedAccelerated_ }
  , integerDotProduct4x8BitPackedSignedAccelerated{ integerDotProduct4x8BitPackedSignedAccelerated_ }
  , integerDotProduct4x8BitPackedMixedSignednessAccelerated{ integerDotProduct4x8BitPackedMixedSignednessAccelerated_ }
  , integerDotProduct16BitUnsignedAccelerated{ integerDotProduct16BitUnsignedAccelerated_ }
  , integerDotProduct16BitSignedAccelerated{ integerDotProduct16BitSignedAccelerated_ }
  , integerDotProduct16BitMixedSignednessAccelerated{ integerDotProduct16BitMixedSignednessAccelerated_ }
  , integerDotProduct32BitUnsignedAccelerated{ integerDotProduct32BitUnsignedAccelerated_ }
  , integerDotProduct32BitSignedAccelerated{ integerDotProduct32BitSignedAccelerated_ }
  , integerDotProduct32BitMixedSignednessAccelerated{ integerDotProduct32BitMixedSignednessAccelerated_ }
  , integerDotProduct64BitUnsignedAccelerated{ integerDotProduct64BitUnsignedAccelerated_ }
  , integerDotProduct64BitSignedAccelerated{ integerDotProduct64BitSignedAccelerated_ }
  , integerDotProduct64BitMixedSignednessAccelerated{ integerDotProduct64BitMixedSignednessAccelerated_ }
  , integerDotProductAccumulatingSaturating8BitUnsignedAccelerated{ integerDotProductAccumulatingSaturating8BitUnsignedAccelerated_ }
  , integerDotProductAccumulatingSaturating8BitSignedAccelerated{ integerDotProductAccumulatingSaturating8BitSignedAccelerated_ }
  , integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated{ integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated_ }
  , integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated{ integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated_ }
  , integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated{ integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated_ }
  , integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated{ integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated_ }
  , integerDotProductAccumulatingSaturating16BitUnsignedAccelerated{ integerDotProductAccumulatingSaturating16BitUnsignedAccelerated_ }
  , integerDotProductAccumulatingSaturating16BitSignedAccelerated{ integerDotProductAccumulatingSaturating16BitSignedAccelerated_ }
  , integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated{ integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated_ }
  , integerDotProductAccumulatingSaturating32BitUnsignedAccelerated{ integerDotProductAccumulatingSaturating32BitUnsignedAccelerated_ }
  , integerDotProductAccumulatingSaturating32BitSignedAccelerated{ integerDotProductAccumulatingSaturating32BitSignedAccelerated_ }
  , integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated{ integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated_ }
  , integerDotProductAccumulatingSaturating64BitUnsignedAccelerated{ integerDotProductAccumulatingSaturating64BitUnsignedAccelerated_ }
  , integerDotProductAccumulatingSaturating64BitSignedAccelerated{ integerDotProductAccumulatingSaturating64BitSignedAccelerated_ }
  , integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated{ integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerDotProductProperties( PhysicalDeviceShaderIntegerDotProductProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderIntegerDotProductProperties( VkPhysicalDeviceShaderIntegerDotProductProperties const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceShaderIntegerDotProductProperties( *reinterpret_cast<PhysicalDeviceShaderIntegerDotProductProperties const *>( &rhs ) )
{}
PhysicalDeviceShaderIntegerDotProductProperties & operator=( PhysicalDeviceShaderIntegerDotProductProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceShaderIntegerDotProductProperties & operator=( VkPhysicalDeviceShaderIntegerDotProductProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductProperties const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceShaderIntegerDotProductProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderIntegerDotProductProperties*>( this );
}
operator VkPhysicalDeviceShaderIntegerDotProductProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderIntegerDotProductProperties*>( this );
}
operator VkPhysicalDeviceShaderIntegerDotProductProperties const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceShaderIntegerDotProductProperties*>( this );
}
operator VkPhysicalDeviceShaderIntegerDotProductProperties *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceShaderIntegerDotProductProperties*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType,
                 pNext,
                 integerDotProduct8BitUnsignedAccelerated,
                 integerDotProduct8BitSignedAccelerated,
                 integerDotProduct8BitMixedSignednessAccelerated,
                 integerDotProduct4x8BitPackedUnsignedAccelerated,
                 integerDotProduct4x8BitPackedSignedAccelerated,
                 integerDotProduct4x8BitPackedMixedSignednessAccelerated,
                 integerDotProduct16BitUnsignedAccelerated,
                 integerDotProduct16BitSignedAccelerated,
                 integerDotProduct16BitMixedSignednessAccelerated,
                 integerDotProduct32BitUnsignedAccelerated,
                 integerDotProduct32BitSignedAccelerated,
                 integerDotProduct32BitMixedSignednessAccelerated,
                 integerDotProduct64BitUnsignedAccelerated,
                 integerDotProduct64BitSignedAccelerated,
                 integerDotProduct64BitMixedSignednessAccelerated,
                 integerDotProductAccumulatingSaturating8BitUnsignedAccelerated,
                 integerDotProductAccumulatingSaturating8BitSignedAccelerated,
                 integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated,
                 integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated,
                 integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated,
                 integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated,
                 integerDotProductAccumulatingSaturating16BitUnsignedAccelerated,
                 integerDotProductAccumulatingSaturating16BitSignedAccelerated,
                 integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated,
                 integerDotProductAccumulatingSaturating32BitUnsignedAccelerated,
                 integerDotProductAccumulatingSaturating32BitSignedAccelerated,
                 integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated,
                 integerDotProductAccumulatingSaturating64BitUnsignedAccelerated,
                 integerDotProductAccumulatingSaturating64BitSignedAccelerated,
                 integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceShaderIntegerDotProductProperties const & ) const = default;
#else
bool operator==( PhysicalDeviceShaderIntegerDotProductProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( integerDotProduct8BitUnsignedAccelerated == rhs.integerDotProduct8BitUnsignedAccelerated )
&& ( integerDotProduct8BitSignedAccelerated == rhs.integerDotProduct8BitSignedAccelerated )
&& ( integerDotProduct8BitMixedSignednessAccelerated == rhs.integerDotProduct8BitMixedSignednessAccelerated )
&& ( integerDotProduct4x8BitPackedUnsignedAccelerated == rhs.integerDotProduct4x8BitPackedUnsignedAccelerated )
&& ( integerDotProduct4x8BitPackedSignedAccelerated == rhs.integerDotProduct4x8BitPackedSignedAccelerated )
&& ( integerDotProduct4x8BitPackedMixedSignednessAccelerated == rhs.integerDotProduct4x8BitPackedMixedSignednessAccelerated )
&& ( integerDotProduct16BitUnsignedAccelerated == rhs.integerDotProduct16BitUnsignedAccelerated )
&& ( integerDotProduct16BitSignedAccelerated == rhs.integerDotProduct16BitSignedAccelerated )
&& ( integerDotProduct16BitMixedSignednessAccelerated == rhs.integerDotProduct16BitMixedSignednessAccelerated )
&& ( integerDotProduct32BitUnsignedAccelerated == rhs.integerDotProduct32BitUnsignedAccelerated )
&& ( integerDotProduct32BitSignedAccelerated == rhs.integerDotProduct32BitSignedAccelerated )
&& ( integerDotProduct32BitMixedSignednessAccelerated == rhs.integerDotProduct32BitMixedSignednessAccelerated )
&& ( integerDotProduct64BitUnsignedAccelerated == rhs.integerDotProduct64BitUnsignedAccelerated )
&& ( integerDotProduct64BitSignedAccelerated == rhs.integerDotProduct64BitSignedAccelerated )
&& ( integerDotProduct64BitMixedSignednessAccelerated == rhs.integerDotProduct64BitMixedSignednessAccelerated )
&& ( integerDotProductAccumulatingSaturating8BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating8BitUnsignedAccelerated )
&& ( integerDotProductAccumulatingSaturating8BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating8BitSignedAccelerated )
&& ( integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated == rhs.integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated )
&& ( integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated )
&& ( integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated == rhs.integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated )
&& ( integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated == rhs.integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated )
&& ( integerDotProductAccumulatingSaturating16BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating16BitUnsignedAccelerated )
&& ( integerDotProductAccumulatingSaturating16BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating16BitSignedAccelerated )
&& ( integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated == rhs.integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated )
&& ( integerDotProductAccumulatingSaturating32BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating32BitUnsignedAccelerated )
&& ( integerDotProductAccumulatingSaturating32BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating32BitSignedAccelerated )
&& ( integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated == rhs.integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated )
&& ( integerDotProductAccumulatingSaturating64BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating64BitUnsignedAccelerated )
&& ( integerDotProductAccumulatingSaturating64BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating64BitSignedAccelerated )
&& ( integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated == rhs.integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated );
#endif
}
bool operator!=( PhysicalDeviceShaderIntegerDotProductProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderIntegerDotProductProperties;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitUnsignedAccelerated = {};
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitSignedAccelerated = {};
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitMixedSignednessAccelerated = {};
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedUnsignedAccelerated = {};
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedSignedAccelerated = {};
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedMixedSignednessAccelerated = {};
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitUnsignedAccelerated = {};
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitSignedAccelerated = {};
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitMixedSignednessAccelerated = {};
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitUnsignedAccelerated = {};
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitSignedAccelerated = {};
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitMixedSignednessAccelerated = {};
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitUnsignedAccelerated = {};
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitSignedAccelerated = {};
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitMixedSignednessAccelerated = {};
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitUnsignedAccelerated = {};
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitSignedAccelerated = {};
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated = {};
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated = {};
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated = {};
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated = {};
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitUnsignedAccelerated = {};
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitSignedAccelerated = {};
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated = {};
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitUnsignedAccelerated = {};
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitSignedAccelerated = {};
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated = {};
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitUnsignedAccelerated = {};
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitSignedAccelerated = {};
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderIntegerDotProductProperties>
{
using Type = PhysicalDeviceShaderIntegerDotProductProperties;
};
using PhysicalDeviceShaderIntegerDotProductPropertiesKHR = PhysicalDeviceShaderIntegerDotProductProperties;
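// Usage sketch (illustrative): properties structs such as this one are read-only
// outputs, which is why no setters are generated for them; they are filled in
// through PhysicalDeviceProperties2, assuming the same `physicalDevice` as above.
//
//   vk::PhysicalDeviceShaderIntegerDotProductProperties dotProductProps;
//   vk::PhysicalDeviceProperties2 props2;
//   props2.pNext = &dotProductProps;
//   physicalDevice.getProperties2( &props2 );
//   bool fast4x8 = ( dotProductProps.integerDotProduct4x8BitPackedUnsignedAccelerated == VK_TRUE );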
// wrapper struct for struct VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL.html
struct PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL
{
using NativeType = VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL(VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, shaderIntegerFunctions2{ shaderIntegerFunctions2_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( *reinterpret_cast<PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const *>( &rhs ) )
{}
PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & operator=( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & operator=( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & setShaderIntegerFunctions2( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2_ ) VULKAN_HPP_NOEXCEPT
{
shaderIntegerFunctions2 = shaderIntegerFunctions2_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL*>( this );
}
operator VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL*>( this );
}
operator VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL*>( this );
}
operator VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, shaderIntegerFunctions2 );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & ) const = default;
#else
bool operator==( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( shaderIntegerFunctions2 == rhs.shaderIntegerFunctions2 );
#endif
}
bool operator!=( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2 = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL>
{
using Type = PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL;
};
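// Usage sketch (illustrative): this vendor feature struct follows the same query
// pattern; the corresponding VK_INTEL_shader_integer_functions2 device extension
// must also be enabled before the feature can be used.
//
//   vk::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL intelFeatures;
//   vk::PhysicalDeviceFeatures2 features2;
//   features2.pNext = &intelFeatures;
//   physicalDevice.getFeatures2( &features2 );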
// wrapper struct for struct VkPhysicalDeviceShaderMaximalReconvergenceFeaturesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderMaximalReconvergenceFeaturesKHR.html
struct PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR
{
using NativeType = VkPhysicalDeviceShaderMaximalReconvergenceFeaturesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderMaximalReconvergenceFeaturesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 shaderMaximalReconvergence_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, shaderMaximalReconvergence{ shaderMaximalReconvergence_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR( PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR( VkPhysicalDeviceShaderMaximalReconvergenceFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR( *reinterpret_cast<PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR const *>( &rhs ) )
{}
PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR & operator=( PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR & operator=( VkPhysicalDeviceShaderMaximalReconvergenceFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR & setShaderMaximalReconvergence( VULKAN_HPP_NAMESPACE::Bool32 shaderMaximalReconvergence_ ) VULKAN_HPP_NOEXCEPT
{
shaderMaximalReconvergence = shaderMaximalReconvergence_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceShaderMaximalReconvergenceFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderMaximalReconvergenceFeaturesKHR*>( this );
}
operator VkPhysicalDeviceShaderMaximalReconvergenceFeaturesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderMaximalReconvergenceFeaturesKHR*>( this );
}
operator VkPhysicalDeviceShaderMaximalReconvergenceFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceShaderMaximalReconvergenceFeaturesKHR*>( this );
}
operator VkPhysicalDeviceShaderMaximalReconvergenceFeaturesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceShaderMaximalReconvergenceFeaturesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, shaderMaximalReconvergence );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR const & ) const = default;
#else
bool operator==( PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( shaderMaximalReconvergence == rhs.shaderMaximalReconvergence );
#endif
}
bool operator!=( PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderMaximalReconvergenceFeaturesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderMaximalReconvergence = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderMaximalReconvergenceFeaturesKHR>
{
using Type = PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR;
};
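// Usage sketch (illustrative): single-boolean feature structs like this one are
// convenient targets for an enhanced-mode StructureChain query:
//
//   auto chain = physicalDevice.getFeatures2< vk::PhysicalDeviceFeatures2,
//                                             vk::PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR >();
//   bool supported =
//     chain.get< vk::PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR >().shaderMaximalReconvergence == VK_TRUE;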
// wrapper struct for struct VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT.html
struct PhysicalDeviceShaderModuleIdentifierFeaturesEXT
{
using NativeType = VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderModuleIdentifierFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderModuleIdentifierFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 shaderModuleIdentifier_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, shaderModuleIdentifier{ shaderModuleIdentifier_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderModuleIdentifierFeaturesEXT( PhysicalDeviceShaderModuleIdentifierFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderModuleIdentifierFeaturesEXT( VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceShaderModuleIdentifierFeaturesEXT( *reinterpret_cast<PhysicalDeviceShaderModuleIdentifierFeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceShaderModuleIdentifierFeaturesEXT & operator=( PhysicalDeviceShaderModuleIdentifierFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceShaderModuleIdentifierFeaturesEXT & operator=( VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderModuleIdentifierFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderModuleIdentifierFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderModuleIdentifierFeaturesEXT & setShaderModuleIdentifier( VULKAN_HPP_NAMESPACE::Bool32 shaderModuleIdentifier_ ) VULKAN_HPP_NOEXCEPT
{
shaderModuleIdentifier = shaderModuleIdentifier_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT*>( this );
}
operator VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT*>( this );
}
operator VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT*>( this );
}
operator VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, shaderModuleIdentifier );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceShaderModuleIdentifierFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceShaderModuleIdentifierFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( shaderModuleIdentifier == rhs.shaderModuleIdentifier );
#endif
}
bool operator!=( PhysicalDeviceShaderModuleIdentifierFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderModuleIdentifierFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderModuleIdentifier = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderModuleIdentifierFeaturesEXT>
{
using Type = PhysicalDeviceShaderModuleIdentifierFeaturesEXT;
};
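// Usage sketch (illustrative): after confirming support, enable the feature by
// chaining the struct into device creation, as with the other feature structs:
//
//   auto moduleIdFeatures = vk::PhysicalDeviceShaderModuleIdentifierFeaturesEXT{}.setShaderModuleIdentifier( VK_TRUE );
//   vk::DeviceCreateInfo deviceCreateInfo;
//   deviceCreateInfo.setPNext( &moduleIdFeatures );
//   // ... queue create infos, enabled extensions, then physicalDevice.createDevice( deviceCreateInfo )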
// wrapper struct for struct VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT.html
struct PhysicalDeviceShaderModuleIdentifierPropertiesEXT
{
using NativeType = VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderModuleIdentifierPropertiesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderModuleIdentifierPropertiesEXT(std::array<uint8_t,VK_UUID_SIZE> const & shaderModuleIdentifierAlgorithmUUID_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, shaderModuleIdentifierAlgorithmUUID{ shaderModuleIdentifierAlgorithmUUID_ }
{}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderModuleIdentifierPropertiesEXT( PhysicalDeviceShaderModuleIdentifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderModuleIdentifierPropertiesEXT( VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceShaderModuleIdentifierPropertiesEXT( *reinterpret_cast<PhysicalDeviceShaderModuleIdentifierPropertiesEXT const *>( &rhs ) )
{}
PhysicalDeviceShaderModuleIdentifierPropertiesEXT & operator=( PhysicalDeviceShaderModuleIdentifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceShaderModuleIdentifierPropertiesEXT & operator=( VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderModuleIdentifierPropertiesEXT const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT*>( this );
}
operator VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT*>( this );
}
operator VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT*>( this );
}
operator VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, shaderModuleIdentifierAlgorithmUUID );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceShaderModuleIdentifierPropertiesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceShaderModuleIdentifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( shaderModuleIdentifierAlgorithmUUID == rhs.shaderModuleIdentifierAlgorithmUUID );
#endif
}
bool operator!=( PhysicalDeviceShaderModuleIdentifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderModuleIdentifierPropertiesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> shaderModuleIdentifierAlgorithmUUID = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderModuleIdentifierPropertiesEXT>
{
using Type = PhysicalDeviceShaderModuleIdentifierPropertiesEXT;
};
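// Usage sketch (illustrative): the UUID identifies the identifier algorithm, so
// an application can tell whether module identifiers cached from a previous run
// are still valid. ArrayWrapper1D derives from std::array, so the member copies
// and compares like one.
//
//   vk::PhysicalDeviceShaderModuleIdentifierPropertiesEXT idProps;
//   vk::PhysicalDeviceProperties2 props2;
//   props2.pNext = &idProps;
//   physicalDevice.getProperties2( &props2 );
//   std::array<uint8_t, VK_UUID_SIZE> algorithmUUID = idProps.shaderModuleIdentifierAlgorithmUUID;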
// wrapper struct for struct VkPhysicalDeviceShaderObjectFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderObjectFeaturesEXT.html
struct PhysicalDeviceShaderObjectFeaturesEXT
{
using NativeType = VkPhysicalDeviceShaderObjectFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderObjectFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderObjectFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 shaderObject_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, shaderObject{ shaderObject_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderObjectFeaturesEXT( PhysicalDeviceShaderObjectFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderObjectFeaturesEXT( VkPhysicalDeviceShaderObjectFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceShaderObjectFeaturesEXT( *reinterpret_cast<PhysicalDeviceShaderObjectFeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceShaderObjectFeaturesEXT & operator=( PhysicalDeviceShaderObjectFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceShaderObjectFeaturesEXT & operator=( VkPhysicalDeviceShaderObjectFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderObjectFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderObjectFeaturesEXT & setShaderObject( VULKAN_HPP_NAMESPACE::Bool32 shaderObject_ ) VULKAN_HPP_NOEXCEPT
{
shaderObject = shaderObject_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceShaderObjectFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderObjectFeaturesEXT*>( this );
}
operator VkPhysicalDeviceShaderObjectFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderObjectFeaturesEXT*>( this );
}
operator VkPhysicalDeviceShaderObjectFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceShaderObjectFeaturesEXT*>( this );
}
operator VkPhysicalDeviceShaderObjectFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceShaderObjectFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, shaderObject );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceShaderObjectFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceShaderObjectFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( shaderObject == rhs.shaderObject );
#endif
}
bool operator!=( PhysicalDeviceShaderObjectFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderObjectFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderObject = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderObjectFeaturesEXT>
{
using Type = PhysicalDeviceShaderObjectFeaturesEXT;
};
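// Example: a minimal sketch (not generated from the registry) of querying this feature struct
// through the extensible features chain, assuming a valid VULKAN_HPP_NAMESPACE::PhysicalDevice
// named `physicalDevice`:
//
//   auto chain = physicalDevice.getFeatures2<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2,
//                                            VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectFeaturesEXT>();
//   VULKAN_HPP_NAMESPACE::Bool32 hasShaderObject =
//     chain.get<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectFeaturesEXT>().shaderObject;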
// wrapper struct for struct VkPhysicalDeviceShaderObjectPropertiesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderObjectPropertiesEXT.html
struct PhysicalDeviceShaderObjectPropertiesEXT
{
using NativeType = VkPhysicalDeviceShaderObjectPropertiesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderObjectPropertiesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderObjectPropertiesEXT(std::array<uint8_t, VK_UUID_SIZE> const & shaderBinaryUUID_ = {}, uint32_t shaderBinaryVersion_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, shaderBinaryUUID{ shaderBinaryUUID_ }, shaderBinaryVersion{ shaderBinaryVersion_ }
{}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderObjectPropertiesEXT( PhysicalDeviceShaderObjectPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderObjectPropertiesEXT( VkPhysicalDeviceShaderObjectPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceShaderObjectPropertiesEXT( *reinterpret_cast<PhysicalDeviceShaderObjectPropertiesEXT const *>( &rhs ) )
{}
PhysicalDeviceShaderObjectPropertiesEXT & operator=( PhysicalDeviceShaderObjectPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceShaderObjectPropertiesEXT & operator=( VkPhysicalDeviceShaderObjectPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectPropertiesEXT const *>( &rhs );
return *this;
}
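// note: properties structs like this one are marked "returnedonly" in the Vulkan registry and are
// filled in by the implementation, which is why no setters are generated for them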
operator VkPhysicalDeviceShaderObjectPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderObjectPropertiesEXT*>( this );
}
operator VkPhysicalDeviceShaderObjectPropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderObjectPropertiesEXT*>( this );
}
operator VkPhysicalDeviceShaderObjectPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceShaderObjectPropertiesEXT*>( this );
}
operator VkPhysicalDeviceShaderObjectPropertiesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceShaderObjectPropertiesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, shaderBinaryUUID, shaderBinaryVersion );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceShaderObjectPropertiesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceShaderObjectPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( shaderBinaryUUID == rhs.shaderBinaryUUID )
&& ( shaderBinaryVersion == rhs.shaderBinaryVersion );
#endif
}
bool operator!=( PhysicalDeviceShaderObjectPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderObjectPropertiesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> shaderBinaryUUID = {};
uint32_t shaderBinaryVersion = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderObjectPropertiesEXT>
{
using Type = PhysicalDeviceShaderObjectPropertiesEXT;
};
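// Example: a minimal sketch (not generated from the registry) of reading these properties,
// assuming a valid VULKAN_HPP_NAMESPACE::PhysicalDevice named `physicalDevice`. A cached shader
// binary is only guaranteed loadable on a device reporting the same shaderBinaryUUID, with
// shaderBinaryVersion providing finer-grained compatibility information:
//
//   auto chain = physicalDevice.getProperties2<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2,
//                                              VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectPropertiesEXT>();
//   auto const & shaderObjectProps = chain.get<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectPropertiesEXT>();
//   // ArrayWrapper1D derives from std::array, so the UUID compares with operator==;
//   // `previouslyCachedUUID` is a hypothetical std::array<uint8_t, VK_UUID_SIZE> from an earlier run
//   bool sameBinaryFormat = ( shaderObjectProps.shaderBinaryUUID == previouslyCachedUUID );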
// wrapper struct for struct VkPhysicalDeviceShaderQuadControlFeaturesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderQuadControlFeaturesKHR.html
struct PhysicalDeviceShaderQuadControlFeaturesKHR
{
using NativeType = VkPhysicalDeviceShaderQuadControlFeaturesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderQuadControlFeaturesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderQuadControlFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 shaderQuadControl_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, shaderQuadControl{ shaderQuadControl_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderQuadControlFeaturesKHR( PhysicalDeviceShaderQuadControlFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderQuadControlFeaturesKHR( VkPhysicalDeviceShaderQuadControlFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceShaderQuadControlFeaturesKHR( *reinterpret_cast<PhysicalDeviceShaderQuadControlFeaturesKHR const *>( &rhs ) )
{}
PhysicalDeviceShaderQuadControlFeaturesKHR & operator=( PhysicalDeviceShaderQuadControlFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceShaderQuadControlFeaturesKHR & operator=( VkPhysicalDeviceShaderQuadControlFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderQuadControlFeaturesKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderQuadControlFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderQuadControlFeaturesKHR & setShaderQuadControl( VULKAN_HPP_NAMESPACE::Bool32 shaderQuadControl_ ) VULKAN_HPP_NOEXCEPT
{
shaderQuadControl = shaderQuadControl_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceShaderQuadControlFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderQuadControlFeaturesKHR*>( this );
}
operator VkPhysicalDeviceShaderQuadControlFeaturesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderQuadControlFeaturesKHR*>( this );
}
operator VkPhysicalDeviceShaderQuadControlFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceShaderQuadControlFeaturesKHR*>( this );
}
operator VkPhysicalDeviceShaderQuadControlFeaturesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceShaderQuadControlFeaturesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, shaderQuadControl );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceShaderQuadControlFeaturesKHR const & ) const = default;
#else
bool operator==( PhysicalDeviceShaderQuadControlFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( shaderQuadControl == rhs.shaderQuadControl );
#endif
}
bool operator!=( PhysicalDeviceShaderQuadControlFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderQuadControlFeaturesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderQuadControl = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderQuadControlFeaturesKHR>
{
using Type = PhysicalDeviceShaderQuadControlFeaturesKHR;
};
// wrapper struct for struct VkPhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR.html
struct PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR
{
using NativeType = VkPhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 shaderRelaxedExtendedInstruction_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, shaderRelaxedExtendedInstruction{ shaderRelaxedExtendedInstruction_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR( PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR( VkPhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR( *reinterpret_cast<PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR const *>( &rhs ) )
{}
PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR & operator=( PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR & operator=( VkPhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR & setShaderRelaxedExtendedInstruction( VULKAN_HPP_NAMESPACE::Bool32 shaderRelaxedExtendedInstruction_ ) VULKAN_HPP_NOEXCEPT
{
shaderRelaxedExtendedInstruction = shaderRelaxedExtendedInstruction_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR*>( this );
}
operator VkPhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR*>( this );
}
operator VkPhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR*>( this );
}
operator VkPhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, shaderRelaxedExtendedInstruction );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR const & ) const = default;
#else
bool operator==( PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( shaderRelaxedExtendedInstruction == rhs.shaderRelaxedExtendedInstruction );
#endif
}
bool operator!=( PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderRelaxedExtendedInstruction = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR>
{
using Type = PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR;
};
// wrapper struct for struct VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT.html
struct PhysicalDeviceShaderReplicatedCompositesFeaturesEXT
{
using NativeType = VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderReplicatedCompositesFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderReplicatedCompositesFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 shaderReplicatedComposites_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, shaderReplicatedComposites{ shaderReplicatedComposites_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderReplicatedCompositesFeaturesEXT( PhysicalDeviceShaderReplicatedCompositesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderReplicatedCompositesFeaturesEXT( VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceShaderReplicatedCompositesFeaturesEXT( *reinterpret_cast<PhysicalDeviceShaderReplicatedCompositesFeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceShaderReplicatedCompositesFeaturesEXT & operator=( PhysicalDeviceShaderReplicatedCompositesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceShaderReplicatedCompositesFeaturesEXT & operator=( VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderReplicatedCompositesFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderReplicatedCompositesFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderReplicatedCompositesFeaturesEXT & setShaderReplicatedComposites( VULKAN_HPP_NAMESPACE::Bool32 shaderReplicatedComposites_ ) VULKAN_HPP_NOEXCEPT
{
shaderReplicatedComposites = shaderReplicatedComposites_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT*>( this );
}
operator VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT*>( this );
}
operator VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT*>( this );
}
operator VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, shaderReplicatedComposites );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceShaderReplicatedCompositesFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceShaderReplicatedCompositesFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( shaderReplicatedComposites == rhs.shaderReplicatedComposites );
#endif
}
bool operator!=( PhysicalDeviceShaderReplicatedCompositesFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderReplicatedCompositesFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderReplicatedComposites = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderReplicatedCompositesFeaturesEXT>
{
using Type = PhysicalDeviceShaderReplicatedCompositesFeaturesEXT;
};
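// Example: a minimal sketch (not generated from the registry) of enabling this feature at device
// creation by linking the struct into the DeviceCreateInfo pNext chain; queue create infos and
// enabled extensions are omitted for brevity:
//
//   VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderReplicatedCompositesFeaturesEXT replicatedFeatures;
//   replicatedFeatures.setShaderReplicatedComposites( VK_TRUE );
//   VULKAN_HPP_NAMESPACE::DeviceCreateInfo deviceCreateInfo;
//   deviceCreateInfo.setPNext( &replicatedFeatures );
//   // ... add queue create infos and VK_EXT_shader_replicated_composites to the enabled
//   // extensions before creating the device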
// wrapper struct for struct VkPhysicalDeviceShaderSMBuiltinsFeaturesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderSMBuiltinsFeaturesNV.html
struct PhysicalDeviceShaderSMBuiltinsFeaturesNV
{
using NativeType = VkPhysicalDeviceShaderSMBuiltinsFeaturesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, shaderSMBuiltins{ shaderSMBuiltins_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsFeaturesNV( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderSMBuiltinsFeaturesNV( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceShaderSMBuiltinsFeaturesNV( *reinterpret_cast<PhysicalDeviceShaderSMBuiltinsFeaturesNV const *>( &rhs ) )
{}
PhysicalDeviceShaderSMBuiltinsFeaturesNV & operator=( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceShaderSMBuiltinsFeaturesNV & operator=( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSMBuiltinsFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSMBuiltinsFeaturesNV & setShaderSMBuiltins( VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins_ ) VULKAN_HPP_NOEXCEPT
{
shaderSMBuiltins = shaderSMBuiltins_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderSMBuiltinsFeaturesNV*>( this );
}
operator VkPhysicalDeviceShaderSMBuiltinsFeaturesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderSMBuiltinsFeaturesNV*>( this );
}
operator VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceShaderSMBuiltinsFeaturesNV*>( this );
}
operator VkPhysicalDeviceShaderSMBuiltinsFeaturesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceShaderSMBuiltinsFeaturesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, shaderSMBuiltins );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & ) const = default;
#else
bool operator==( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( shaderSMBuiltins == rhs.shaderSMBuiltins );
#endif
}
bool operator!=( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV>
{
using Type = PhysicalDeviceShaderSMBuiltinsFeaturesNV;
};
// wrapper struct for struct VkPhysicalDeviceShaderSMBuiltinsPropertiesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderSMBuiltinsPropertiesNV.html
struct PhysicalDeviceShaderSMBuiltinsPropertiesNV
{
using NativeType = VkPhysicalDeviceShaderSMBuiltinsPropertiesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsPropertiesNV(uint32_t shaderSMCount_ = {}, uint32_t shaderWarpsPerSM_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, shaderSMCount{ shaderSMCount_ }, shaderWarpsPerSM{ shaderWarpsPerSM_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsPropertiesNV( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderSMBuiltinsPropertiesNV( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceShaderSMBuiltinsPropertiesNV( *reinterpret_cast<PhysicalDeviceShaderSMBuiltinsPropertiesNV const *>( &rhs ) )
{}
PhysicalDeviceShaderSMBuiltinsPropertiesNV & operator=( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceShaderSMBuiltinsPropertiesNV & operator=( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderSMBuiltinsPropertiesNV*>( this );
}
operator VkPhysicalDeviceShaderSMBuiltinsPropertiesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderSMBuiltinsPropertiesNV*>( this );
}
operator VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceShaderSMBuiltinsPropertiesNV*>( this );
}
operator VkPhysicalDeviceShaderSMBuiltinsPropertiesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceShaderSMBuiltinsPropertiesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, shaderSMCount, shaderWarpsPerSM );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & ) const = default;
#else
bool operator==( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( shaderSMCount == rhs.shaderSMCount )
&& ( shaderWarpsPerSM == rhs.shaderWarpsPerSM );
#endif
}
bool operator!=( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV;
void * pNext = {};
uint32_t shaderSMCount = {};
uint32_t shaderWarpsPerSM = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV>
{
using Type = PhysicalDeviceShaderSMBuiltinsPropertiesNV;
};
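// Example: shaderSMCount and shaderWarpsPerSM describe the streaming-multiprocessor layout of the
// device; their product gives an upper bound on concurrently resident warps. A minimal sketch
// (not generated from the registry), assuming a valid VULKAN_HPP_NAMESPACE::PhysicalDevice named
// `physicalDevice`:
//
//   auto chain = physicalDevice.getProperties2<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2,
//                                              VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV>();
//   auto const & smProps = chain.get<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV>();
//   uint32_t maxResidentWarps = smProps.shaderSMCount * smProps.shaderWarpsPerSM;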
// wrapper struct for struct VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures.html
struct PhysicalDeviceShaderSubgroupExtendedTypesFeatures
{
using NativeType = VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupExtendedTypesFeatures(VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, shaderSubgroupExtendedTypes{ shaderSubgroupExtendedTypes_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupExtendedTypesFeatures( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderSubgroupExtendedTypesFeatures( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceShaderSubgroupExtendedTypesFeatures( *reinterpret_cast<PhysicalDeviceShaderSubgroupExtendedTypesFeatures const *>( &rhs ) )
{}
PhysicalDeviceShaderSubgroupExtendedTypesFeatures & operator=( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceShaderSubgroupExtendedTypesFeatures & operator=( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeatures const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupExtendedTypesFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupExtendedTypesFeatures & setShaderSubgroupExtendedTypes( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ ) VULKAN_HPP_NOEXCEPT
{
shaderSubgroupExtendedTypes = shaderSubgroupExtendedTypes_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures*>( this );
}
operator VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures*>( this );
}
operator VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures*>( this );
}
operator VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, shaderSubgroupExtendedTypes );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & ) const = default;
#else
bool operator==( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( shaderSubgroupExtendedTypes == rhs.shaderSubgroupExtendedTypes );
#endif
}
bool operator!=( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures>
{
using Type = PhysicalDeviceShaderSubgroupExtendedTypesFeatures;
};
using PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR = PhysicalDeviceShaderSubgroupExtendedTypesFeatures;
// wrapper struct for struct VkPhysicalDeviceShaderSubgroupRotateFeatures, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderSubgroupRotateFeatures.html
struct PhysicalDeviceShaderSubgroupRotateFeatures
{
using NativeType = VkPhysicalDeviceShaderSubgroupRotateFeatures;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderSubgroupRotateFeatures;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupRotateFeatures(VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupRotate_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupRotateClustered_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, shaderSubgroupRotate{ shaderSubgroupRotate_ }, shaderSubgroupRotateClustered{ shaderSubgroupRotateClustered_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupRotateFeatures( PhysicalDeviceShaderSubgroupRotateFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderSubgroupRotateFeatures( VkPhysicalDeviceShaderSubgroupRotateFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceShaderSubgroupRotateFeatures( *reinterpret_cast<PhysicalDeviceShaderSubgroupRotateFeatures const *>( &rhs ) )
{}
PhysicalDeviceShaderSubgroupRotateFeatures & operator=( PhysicalDeviceShaderSubgroupRotateFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceShaderSubgroupRotateFeatures & operator=( VkPhysicalDeviceShaderSubgroupRotateFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupRotateFeatures const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupRotateFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupRotateFeatures & setShaderSubgroupRotate( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupRotate_ ) VULKAN_HPP_NOEXCEPT
{
shaderSubgroupRotate = shaderSubgroupRotate_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupRotateFeatures & setShaderSubgroupRotateClustered( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupRotateClustered_ ) VULKAN_HPP_NOEXCEPT
{
shaderSubgroupRotateClustered = shaderSubgroupRotateClustered_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceShaderSubgroupRotateFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderSubgroupRotateFeatures*>( this );
}
operator VkPhysicalDeviceShaderSubgroupRotateFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderSubgroupRotateFeatures*>( this );
}
operator VkPhysicalDeviceShaderSubgroupRotateFeatures const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceShaderSubgroupRotateFeatures*>( this );
}
operator VkPhysicalDeviceShaderSubgroupRotateFeatures *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceShaderSubgroupRotateFeatures*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, shaderSubgroupRotate, shaderSubgroupRotateClustered );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceShaderSubgroupRotateFeatures const & ) const = default;
#else
bool operator==( PhysicalDeviceShaderSubgroupRotateFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( shaderSubgroupRotate == rhs.shaderSubgroupRotate )
&& ( shaderSubgroupRotateClustered == rhs.shaderSubgroupRotateClustered );
#endif
}
bool operator!=( PhysicalDeviceShaderSubgroupRotateFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSubgroupRotateFeatures;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupRotate = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupRotateClustered = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderSubgroupRotateFeatures>
{
using Type = PhysicalDeviceShaderSubgroupRotateFeatures;
};
using PhysicalDeviceShaderSubgroupRotateFeaturesKHR = PhysicalDeviceShaderSubgroupRotateFeatures;
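// Example: several feature structs can be queried through one structure chain; a minimal sketch
// (not generated from the registry) combining the subgroup-rotate and quad-control features
// defined above, assuming a valid VULKAN_HPP_NAMESPACE::PhysicalDevice named `physicalDevice`:
//
//   auto chain = physicalDevice.getFeatures2<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2,
//                                            VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupRotateFeatures,
//                                            VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderQuadControlFeaturesKHR>();
//   bool clusteredRotate =
//     chain.get<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupRotateFeatures>().shaderSubgroupRotateClustered == VK_TRUE;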
// wrapper struct for struct VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR.html
struct PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR
{
using NativeType = VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupUniformControlFlow_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, shaderSubgroupUniformControlFlow{ shaderSubgroupUniformControlFlow_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR( PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR( VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR( *reinterpret_cast<PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const *>( &rhs ) )
{}
PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR & operator=( PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR & operator=( VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR & setShaderSubgroupUniformControlFlow( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupUniformControlFlow_ ) VULKAN_HPP_NOEXCEPT
{
shaderSubgroupUniformControlFlow = shaderSubgroupUniformControlFlow_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR*>( this );
}
operator VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR*>( this );
}
operator VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR*>( this );
}
operator VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, shaderSubgroupUniformControlFlow );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & ) const = default;
#else
bool operator==( PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( shaderSubgroupUniformControlFlow == rhs.shaderSubgroupUniformControlFlow );
#endif
}
bool operator!=( PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupUniformControlFlow = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR>
{
using Type = PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR;
};
// wrapper struct for struct VkPhysicalDeviceShaderTerminateInvocationFeatures, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderTerminateInvocationFeatures.html
struct PhysicalDeviceShaderTerminateInvocationFeatures
{
using NativeType = VkPhysicalDeviceShaderTerminateInvocationFeatures;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderTerminateInvocationFeatures;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderTerminateInvocationFeatures(VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, shaderTerminateInvocation{ shaderTerminateInvocation_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderTerminateInvocationFeatures( PhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderTerminateInvocationFeatures( VkPhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceShaderTerminateInvocationFeatures( *reinterpret_cast<PhysicalDeviceShaderTerminateInvocationFeatures const *>( &rhs ) )
{}
PhysicalDeviceShaderTerminateInvocationFeatures & operator=( PhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceShaderTerminateInvocationFeatures & operator=( VkPhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTerminateInvocationFeatures const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderTerminateInvocationFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderTerminateInvocationFeatures & setShaderTerminateInvocation( VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation_ ) VULKAN_HPP_NOEXCEPT
{
shaderTerminateInvocation = shaderTerminateInvocation_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceShaderTerminateInvocationFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderTerminateInvocationFeatures*>( this );
}
operator VkPhysicalDeviceShaderTerminateInvocationFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderTerminateInvocationFeatures*>( this );
}
operator VkPhysicalDeviceShaderTerminateInvocationFeatures const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceShaderTerminateInvocationFeatures*>( this );
}
operator VkPhysicalDeviceShaderTerminateInvocationFeatures *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceShaderTerminateInvocationFeatures*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, shaderTerminateInvocation );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceShaderTerminateInvocationFeatures const & ) const = default;
#else
bool operator==( PhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( shaderTerminateInvocation == rhs.shaderTerminateInvocation );
#endif
}
bool operator!=( PhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderTerminateInvocationFeatures;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderTerminateInvocationFeatures>
{
using Type = PhysicalDeviceShaderTerminateInvocationFeatures;
};
using PhysicalDeviceShaderTerminateInvocationFeaturesKHR = PhysicalDeviceShaderTerminateInvocationFeatures;
// wrapper struct for struct VkPhysicalDeviceShaderTileImageFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderTileImageFeaturesEXT.html
struct PhysicalDeviceShaderTileImageFeaturesEXT
{
using NativeType = VkPhysicalDeviceShaderTileImageFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderTileImageFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderTileImageFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageColorReadAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageDepthReadAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageStencilReadAccess_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, shaderTileImageColorReadAccess{ shaderTileImageColorReadAccess_ }, shaderTileImageDepthReadAccess{ shaderTileImageDepthReadAccess_ }, shaderTileImageStencilReadAccess{ shaderTileImageStencilReadAccess_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderTileImageFeaturesEXT( PhysicalDeviceShaderTileImageFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderTileImageFeaturesEXT( VkPhysicalDeviceShaderTileImageFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceShaderTileImageFeaturesEXT( *reinterpret_cast<PhysicalDeviceShaderTileImageFeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceShaderTileImageFeaturesEXT & operator=( PhysicalDeviceShaderTileImageFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceShaderTileImageFeaturesEXT & operator=( VkPhysicalDeviceShaderTileImageFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImageFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderTileImageFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderTileImageFeaturesEXT & setShaderTileImageColorReadAccess( VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageColorReadAccess_ ) VULKAN_HPP_NOEXCEPT
{
shaderTileImageColorReadAccess = shaderTileImageColorReadAccess_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderTileImageFeaturesEXT & setShaderTileImageDepthReadAccess( VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageDepthReadAccess_ ) VULKAN_HPP_NOEXCEPT
{
shaderTileImageDepthReadAccess = shaderTileImageDepthReadAccess_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderTileImageFeaturesEXT & setShaderTileImageStencilReadAccess( VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageStencilReadAccess_ ) VULKAN_HPP_NOEXCEPT
{
shaderTileImageStencilReadAccess = shaderTileImageStencilReadAccess_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceShaderTileImageFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderTileImageFeaturesEXT*>( this );
}
operator VkPhysicalDeviceShaderTileImageFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderTileImageFeaturesEXT*>( this );
}
operator VkPhysicalDeviceShaderTileImageFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceShaderTileImageFeaturesEXT*>( this );
}
operator VkPhysicalDeviceShaderTileImageFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceShaderTileImageFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, shaderTileImageColorReadAccess, shaderTileImageDepthReadAccess, shaderTileImageStencilReadAccess );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceShaderTileImageFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceShaderTileImageFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( shaderTileImageColorReadAccess == rhs.shaderTileImageColorReadAccess )
&& ( shaderTileImageDepthReadAccess == rhs.shaderTileImageDepthReadAccess )
&& ( shaderTileImageStencilReadAccess == rhs.shaderTileImageStencilReadAccess );
#endif
}
bool operator!=( PhysicalDeviceShaderTileImageFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderTileImageFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageColorReadAccess = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageDepthReadAccess = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageStencilReadAccess = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderTileImageFeaturesEXT>
{
using Type = PhysicalDeviceShaderTileImageFeaturesEXT;
};
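// Example: the implicit conversion operators above also make the wrapper usable with the C API;
// a minimal sketch (not generated from the registry) chaining it behind a VkPhysicalDeviceFeatures2
// for vkGetPhysicalDeviceFeatures2, assuming a valid VkPhysicalDevice named `vkPhysicalDevice`:
//
//   VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImageFeaturesEXT tileImageFeatures;
//   VkPhysicalDeviceFeatures2 features2{};
//   features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
//   features2.pNext = static_cast<VkPhysicalDeviceShaderTileImageFeaturesEXT *>( tileImageFeatures );
//   vkGetPhysicalDeviceFeatures2( vkPhysicalDevice, &features2 );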
// wrapper struct for struct VkPhysicalDeviceShaderTileImagePropertiesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderTileImagePropertiesEXT.html
struct PhysicalDeviceShaderTileImagePropertiesEXT
{
using NativeType = VkPhysicalDeviceShaderTileImagePropertiesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderTileImagePropertiesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderTileImagePropertiesEXT(VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageCoherentReadAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageReadSampleFromPixelRateInvocation_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageReadFromHelperInvocation_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, shaderTileImageCoherentReadAccelerated{ shaderTileImageCoherentReadAccelerated_ }, shaderTileImageReadSampleFromPixelRateInvocation{ shaderTileImageReadSampleFromPixelRateInvocation_ }, shaderTileImageReadFromHelperInvocation{ shaderTileImageReadFromHelperInvocation_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderTileImagePropertiesEXT( PhysicalDeviceShaderTileImagePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderTileImagePropertiesEXT( VkPhysicalDeviceShaderTileImagePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceShaderTileImagePropertiesEXT( *reinterpret_cast<PhysicalDeviceShaderTileImagePropertiesEXT const *>( &rhs ) )
{}
PhysicalDeviceShaderTileImagePropertiesEXT & operator=( PhysicalDeviceShaderTileImagePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceShaderTileImagePropertiesEXT & operator=( VkPhysicalDeviceShaderTileImagePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImagePropertiesEXT const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceShaderTileImagePropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderTileImagePropertiesEXT*>( this );
}
operator VkPhysicalDeviceShaderTileImagePropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderTileImagePropertiesEXT*>( this );
}
operator VkPhysicalDeviceShaderTileImagePropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceShaderTileImagePropertiesEXT*>( this );
}
operator VkPhysicalDeviceShaderTileImagePropertiesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceShaderTileImagePropertiesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, shaderTileImageCoherentReadAccelerated, shaderTileImageReadSampleFromPixelRateInvocation, shaderTileImageReadFromHelperInvocation );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceShaderTileImagePropertiesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceShaderTileImagePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( shaderTileImageCoherentReadAccelerated == rhs.shaderTileImageCoherentReadAccelerated )
&& ( shaderTileImageReadSampleFromPixelRateInvocation == rhs.shaderTileImageReadSampleFromPixelRateInvocation )
&& ( shaderTileImageReadFromHelperInvocation == rhs.shaderTileImageReadFromHelperInvocation );
#endif
}
bool operator!=( PhysicalDeviceShaderTileImagePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderTileImagePropertiesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageCoherentReadAccelerated = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageReadSampleFromPixelRateInvocation = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageReadFromHelperInvocation = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderTileImagePropertiesEXT>
{
using Type = PhysicalDeviceShaderTileImagePropertiesEXT;
};
// wrapper struct for struct VkPhysicalDeviceShadingRateImageFeaturesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShadingRateImageFeaturesNV.html
struct PhysicalDeviceShadingRateImageFeaturesNV
{
using NativeType = VkPhysicalDeviceShadingRateImageFeaturesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShadingRateImageFeaturesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImageFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, shadingRateImage{ shadingRateImage_ }, shadingRateCoarseSampleOrder{ shadingRateCoarseSampleOrder_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImageFeaturesNV( PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShadingRateImageFeaturesNV( VkPhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceShadingRateImageFeaturesNV( *reinterpret_cast<PhysicalDeviceShadingRateImageFeaturesNV const *>( &rhs ) )
{}
PhysicalDeviceShadingRateImageFeaturesNV & operator=( PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceShadingRateImageFeaturesNV & operator=( VkPhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImageFeaturesNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShadingRateImageFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShadingRateImageFeaturesNV & setShadingRateImage( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage_ ) VULKAN_HPP_NOEXCEPT
{
shadingRateImage = shadingRateImage_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShadingRateImageFeaturesNV & setShadingRateCoarseSampleOrder( VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder_ ) VULKAN_HPP_NOEXCEPT
{
shadingRateCoarseSampleOrder = shadingRateCoarseSampleOrder_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceShadingRateImageFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShadingRateImageFeaturesNV*>( this );
}
operator VkPhysicalDeviceShadingRateImageFeaturesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShadingRateImageFeaturesNV*>( this );
}
operator VkPhysicalDeviceShadingRateImageFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceShadingRateImageFeaturesNV*>( this );
}
operator VkPhysicalDeviceShadingRateImageFeaturesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceShadingRateImageFeaturesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, shadingRateImage, shadingRateCoarseSampleOrder );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceShadingRateImageFeaturesNV const & ) const = default;
#else
bool operator==( PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( shadingRateImage == rhs.shadingRateImage )
&& ( shadingRateCoarseSampleOrder == rhs.shadingRateCoarseSampleOrder );
#endif
}
bool operator!=( PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShadingRateImageFeaturesNV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage = {};
VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShadingRateImageFeaturesNV>
{
using Type = PhysicalDeviceShadingRateImageFeaturesNV;
};
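  // Usage sketch (illustrative comment only, not part of the generated API): feature structs such as
  // PhysicalDeviceShadingRateImageFeaturesNV are typically chained into PhysicalDeviceFeatures2 via
  // pNext and filled by getFeatures2. Assumes a valid vk::PhysicalDevice `physicalDevice` and a driver
  // exposing VK_NV_shading_rate_image.
  //
  //   vk::PhysicalDeviceShadingRateImageFeaturesNV shadingRateFeatures;
  //   vk::PhysicalDeviceFeatures2 features2;
  //   features2.pNext = &shadingRateFeatures;      // chain the extension struct
  //   physicalDevice.getFeatures2( &features2 );   // the implementation fills both structs
  //   bool supported = ( shadingRateFeatures.shadingRateImage == VK_TRUE );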
// wrapper struct for struct VkPhysicalDeviceShadingRateImagePropertiesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShadingRateImagePropertiesNV.html
struct PhysicalDeviceShadingRateImagePropertiesNV
{
using NativeType = VkPhysicalDeviceShadingRateImagePropertiesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShadingRateImagePropertiesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImagePropertiesNV(VULKAN_HPP_NAMESPACE::Extent2D shadingRateTexelSize_ = {}, uint32_t shadingRatePaletteSize_ = {}, uint32_t shadingRateMaxCoarseSamples_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, shadingRateTexelSize{ shadingRateTexelSize_ }, shadingRatePaletteSize{ shadingRatePaletteSize_ }, shadingRateMaxCoarseSamples{ shadingRateMaxCoarseSamples_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImagePropertiesNV( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShadingRateImagePropertiesNV( VkPhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceShadingRateImagePropertiesNV( *reinterpret_cast<PhysicalDeviceShadingRateImagePropertiesNV const *>( &rhs ) )
{}
PhysicalDeviceShadingRateImagePropertiesNV & operator=( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceShadingRateImagePropertiesNV & operator=( VkPhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImagePropertiesNV const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceShadingRateImagePropertiesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShadingRateImagePropertiesNV*>( this );
}
operator VkPhysicalDeviceShadingRateImagePropertiesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShadingRateImagePropertiesNV*>( this );
}
operator VkPhysicalDeviceShadingRateImagePropertiesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceShadingRateImagePropertiesNV*>( this );
}
operator VkPhysicalDeviceShadingRateImagePropertiesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceShadingRateImagePropertiesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Extent2D const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, shadingRateTexelSize, shadingRatePaletteSize, shadingRateMaxCoarseSamples );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceShadingRateImagePropertiesNV const & ) const = default;
#else
bool operator==( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( shadingRateTexelSize == rhs.shadingRateTexelSize )
&& ( shadingRatePaletteSize == rhs.shadingRatePaletteSize )
&& ( shadingRateMaxCoarseSamples == rhs.shadingRateMaxCoarseSamples );
#endif
}
bool operator!=( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShadingRateImagePropertiesNV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Extent2D shadingRateTexelSize = {};
uint32_t shadingRatePaletteSize = {};
uint32_t shadingRateMaxCoarseSamples = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShadingRateImagePropertiesNV>
{
using Type = PhysicalDeviceShadingRateImagePropertiesNV;
};
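  // Usage sketch (illustrative comment only): properties structs are output-only — note the absence of
  // a setter block above — and are filled by chaining them into PhysicalDeviceProperties2. Assumes a
  // valid vk::PhysicalDevice `physicalDevice`.
  //
  //   vk::PhysicalDeviceShadingRateImagePropertiesNV shadingRateProps;
  //   vk::PhysicalDeviceProperties2 props2;
  //   props2.pNext = &shadingRateProps;
  //   physicalDevice.getProperties2( &props2 );
  //   vk::Extent2D texelSize = shadingRateProps.shadingRateTexelSize;  // implementation-dependent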
// wrapper struct for struct VkPhysicalDeviceSparseImageFormatInfo2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceSparseImageFormatInfo2.html
struct PhysicalDeviceSparseImageFormatInfo2
{
using NativeType = VkPhysicalDeviceSparseImageFormatInfo2;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSparseImageFormatInfo2;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseImageFormatInfo2(VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::ImageType type_ = VULKAN_HPP_NAMESPACE::ImageType::e1D, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, format{ format_ }, type{ type_ }, samples{ samples_ }, usage{ usage_ }, tiling{ tiling_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseImageFormatInfo2( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceSparseImageFormatInfo2( VkPhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceSparseImageFormatInfo2( *reinterpret_cast<PhysicalDeviceSparseImageFormatInfo2 const *>( &rhs ) )
{}
PhysicalDeviceSparseImageFormatInfo2 & operator=( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceSparseImageFormatInfo2 & operator=( VkPhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
{
format = format_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 & setType( VULKAN_HPP_NAMESPACE::ImageType type_ ) VULKAN_HPP_NOEXCEPT
{
type = type_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT
{
samples = samples_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
{
usage = usage_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 & setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT
{
tiling = tiling_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceSparseImageFormatInfo2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2*>( this );
}
operator VkPhysicalDeviceSparseImageFormatInfo2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceSparseImageFormatInfo2*>( this );
}
operator VkPhysicalDeviceSparseImageFormatInfo2 const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2*>( this );
}
operator VkPhysicalDeviceSparseImageFormatInfo2 *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceSparseImageFormatInfo2*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::ImageType const &, VULKAN_HPP_NAMESPACE::SampleCountFlagBits const &, VULKAN_HPP_NAMESPACE::ImageUsageFlags const &, VULKAN_HPP_NAMESPACE::ImageTiling const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, format, type, samples, usage, tiling );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceSparseImageFormatInfo2 const & ) const = default;
#else
bool operator==( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( format == rhs.format )
&& ( type == rhs.type )
&& ( samples == rhs.samples )
&& ( usage == rhs.usage )
&& ( tiling == rhs.tiling );
#endif
}
bool operator!=( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSparseImageFormatInfo2;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
VULKAN_HPP_NAMESPACE::ImageType type = VULKAN_HPP_NAMESPACE::ImageType::e1D;
VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {};
VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal;
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceSparseImageFormatInfo2>
{
using Type = PhysicalDeviceSparseImageFormatInfo2;
};
using PhysicalDeviceSparseImageFormatInfo2KHR = PhysicalDeviceSparseImageFormatInfo2;
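  // Usage sketch (illustrative comment only): unlike the output-only properties structs, this info
  // struct is an input, so the chainable setters above support a fluent style. Assumes a valid
  // vk::PhysicalDevice `physicalDevice` and the enhanced-mode API; the format and usage values are
  // placeholders.
  //
  //   auto formatInfo = vk::PhysicalDeviceSparseImageFormatInfo2{}
  //                       .setFormat( vk::Format::eR8G8B8A8Unorm )
  //                       .setType( vk::ImageType::e2D )
  //                       .setSamples( vk::SampleCountFlagBits::e1 )
  //                       .setUsage( vk::ImageUsageFlagBits::eSampled )
  //                       .setTiling( vk::ImageTiling::eOptimal );
  //   std::vector<vk::SparseImageFormatProperties2> sparseProps =
  //     physicalDevice.getSparseImageFormatProperties2( formatInfo );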
// wrapper struct for struct VkPhysicalDeviceSubgroupProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceSubgroupProperties.html
struct PhysicalDeviceSubgroupProperties
{
using NativeType = VkPhysicalDeviceSubgroupProperties;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubgroupProperties;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupProperties(uint32_t subgroupSize_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedStages_ = {}, VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags supportedOperations_ = {}, VULKAN_HPP_NAMESPACE::Bool32 quadOperationsInAllStages_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, subgroupSize{ subgroupSize_ }, supportedStages{ supportedStages_ }, supportedOperations{ supportedOperations_ }, quadOperationsInAllStages{ quadOperationsInAllStages_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupProperties( PhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceSubgroupProperties( VkPhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceSubgroupProperties( *reinterpret_cast<PhysicalDeviceSubgroupProperties const *>( &rhs ) )
{}
PhysicalDeviceSubgroupProperties & operator=( PhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceSubgroupProperties & operator=( VkPhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceSubgroupProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceSubgroupProperties*>( this );
}
operator VkPhysicalDeviceSubgroupProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceSubgroupProperties*>( this );
}
operator VkPhysicalDeviceSubgroupProperties const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceSubgroupProperties*>( this );
}
operator VkPhysicalDeviceSubgroupProperties *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceSubgroupProperties*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ShaderStageFlags const &, VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, subgroupSize, supportedStages, supportedOperations, quadOperationsInAllStages );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceSubgroupProperties const & ) const = default;
#else
bool operator==( PhysicalDeviceSubgroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( subgroupSize == rhs.subgroupSize )
&& ( supportedStages == rhs.supportedStages )
&& ( supportedOperations == rhs.supportedOperations )
&& ( quadOperationsInAllStages == rhs.quadOperationsInAllStages );
#endif
}
bool operator!=( PhysicalDeviceSubgroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupProperties;
void * pNext = {};
uint32_t subgroupSize = {};
VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedStages = {};
VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags supportedOperations = {};
VULKAN_HPP_NAMESPACE::Bool32 quadOperationsInAllStages = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceSubgroupProperties>
{
using Type = PhysicalDeviceSubgroupProperties;
};
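  // Usage sketch (illustrative comment only): as an alternative to hand-wiring pNext, the templated
  // getProperties2 overload returns a vk::StructureChain that links and unpacks the structs in one
  // call. Assumes a valid vk::PhysicalDevice `physicalDevice` and that
  // VULKAN_HPP_DISABLE_ENHANCED_MODE is not defined.
  //
  //   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
  //                                              vk::PhysicalDeviceSubgroupProperties>();
  //   auto const & subgroupProps = chain.get<vk::PhysicalDeviceSubgroupProperties>();
  //   uint32_t subgroupSize = subgroupProps.subgroupSize;  // implementation-dependent, e.g. 32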
// wrapper struct for struct VkPhysicalDeviceSubgroupSizeControlFeatures, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceSubgroupSizeControlFeatures.html
struct PhysicalDeviceSubgroupSizeControlFeatures
{
using NativeType = VkPhysicalDeviceSubgroupSizeControlFeatures;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubgroupSizeControlFeatures;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlFeatures(VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ = {}, VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, subgroupSizeControl{ subgroupSizeControl_ }, computeFullSubgroups{ computeFullSubgroups_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlFeatures( PhysicalDeviceSubgroupSizeControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceSubgroupSizeControlFeatures( VkPhysicalDeviceSubgroupSizeControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceSubgroupSizeControlFeatures( *reinterpret_cast<PhysicalDeviceSubgroupSizeControlFeatures const *>( &rhs ) )
{}
PhysicalDeviceSubgroupSizeControlFeatures & operator=( PhysicalDeviceSubgroupSizeControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceSubgroupSizeControlFeatures & operator=( VkPhysicalDeviceSubgroupSizeControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeatures const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubgroupSizeControlFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubgroupSizeControlFeatures & setSubgroupSizeControl( VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ ) VULKAN_HPP_NOEXCEPT
{
subgroupSizeControl = subgroupSizeControl_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubgroupSizeControlFeatures & setComputeFullSubgroups( VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ ) VULKAN_HPP_NOEXCEPT
{
computeFullSubgroups = computeFullSubgroups_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceSubgroupSizeControlFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceSubgroupSizeControlFeatures*>( this );
}
operator VkPhysicalDeviceSubgroupSizeControlFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceSubgroupSizeControlFeatures*>( this );
}
operator VkPhysicalDeviceSubgroupSizeControlFeatures const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceSubgroupSizeControlFeatures*>( this );
}
operator VkPhysicalDeviceSubgroupSizeControlFeatures *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceSubgroupSizeControlFeatures*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, subgroupSizeControl, computeFullSubgroups );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceSubgroupSizeControlFeatures const & ) const = default;
#else
bool operator==( PhysicalDeviceSubgroupSizeControlFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( subgroupSizeControl == rhs.subgroupSizeControl )
&& ( computeFullSubgroups == rhs.computeFullSubgroups );
#endif
}
bool operator!=( PhysicalDeviceSubgroupSizeControlFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupSizeControlFeatures;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl = {};
VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceSubgroupSizeControlFeatures>
{
using Type = PhysicalDeviceSubgroupSizeControlFeatures;
};
using PhysicalDeviceSubgroupSizeControlFeaturesEXT = PhysicalDeviceSubgroupSizeControlFeatures;
// wrapper struct for struct VkPhysicalDeviceSubgroupSizeControlProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceSubgroupSizeControlProperties.html
struct PhysicalDeviceSubgroupSizeControlProperties
{
using NativeType = VkPhysicalDeviceSubgroupSizeControlProperties;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubgroupSizeControlProperties;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlProperties(uint32_t minSubgroupSize_ = {}, uint32_t maxSubgroupSize_ = {}, uint32_t maxComputeWorkgroupSubgroups_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, minSubgroupSize{ minSubgroupSize_ }, maxSubgroupSize{ maxSubgroupSize_ }, maxComputeWorkgroupSubgroups{ maxComputeWorkgroupSubgroups_ }, requiredSubgroupSizeStages{ requiredSubgroupSizeStages_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlProperties( PhysicalDeviceSubgroupSizeControlProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceSubgroupSizeControlProperties( VkPhysicalDeviceSubgroupSizeControlProperties const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceSubgroupSizeControlProperties( *reinterpret_cast<PhysicalDeviceSubgroupSizeControlProperties const *>( &rhs ) )
{}
PhysicalDeviceSubgroupSizeControlProperties & operator=( PhysicalDeviceSubgroupSizeControlProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceSubgroupSizeControlProperties & operator=( VkPhysicalDeviceSubgroupSizeControlProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlProperties const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceSubgroupSizeControlProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceSubgroupSizeControlProperties*>( this );
}
operator VkPhysicalDeviceSubgroupSizeControlProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceSubgroupSizeControlProperties*>( this );
}
operator VkPhysicalDeviceSubgroupSizeControlProperties const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceSubgroupSizeControlProperties*>( this );
}
operator VkPhysicalDeviceSubgroupSizeControlProperties *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceSubgroupSizeControlProperties*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ShaderStageFlags const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, minSubgroupSize, maxSubgroupSize, maxComputeWorkgroupSubgroups, requiredSubgroupSizeStages );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceSubgroupSizeControlProperties const & ) const = default;
#else
bool operator==( PhysicalDeviceSubgroupSizeControlProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( minSubgroupSize == rhs.minSubgroupSize )
&& ( maxSubgroupSize == rhs.maxSubgroupSize )
&& ( maxComputeWorkgroupSubgroups == rhs.maxComputeWorkgroupSubgroups )
&& ( requiredSubgroupSizeStages == rhs.requiredSubgroupSizeStages );
#endif
}
bool operator!=( PhysicalDeviceSubgroupSizeControlProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupSizeControlProperties;
void * pNext = {};
uint32_t minSubgroupSize = {};
uint32_t maxSubgroupSize = {};
uint32_t maxComputeWorkgroupSubgroups = {};
VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceSubgroupSizeControlProperties>
{
using Type = PhysicalDeviceSubgroupSizeControlProperties;
};
using PhysicalDeviceSubgroupSizeControlPropertiesEXT = PhysicalDeviceSubgroupSizeControlProperties;
// wrapper struct for struct VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT.html
struct PhysicalDeviceSubpassMergeFeedbackFeaturesEXT
{
using NativeType = VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubpassMergeFeedbackFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceSubpassMergeFeedbackFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 subpassMergeFeedback_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, subpassMergeFeedback{ subpassMergeFeedback_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceSubpassMergeFeedbackFeaturesEXT( PhysicalDeviceSubpassMergeFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceSubpassMergeFeedbackFeaturesEXT( VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceSubpassMergeFeedbackFeaturesEXT( *reinterpret_cast<PhysicalDeviceSubpassMergeFeedbackFeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceSubpassMergeFeedbackFeaturesEXT & operator=( PhysicalDeviceSubpassMergeFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceSubpassMergeFeedbackFeaturesEXT & operator=( VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassMergeFeedbackFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubpassMergeFeedbackFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubpassMergeFeedbackFeaturesEXT & setSubpassMergeFeedback( VULKAN_HPP_NAMESPACE::Bool32 subpassMergeFeedback_ ) VULKAN_HPP_NOEXCEPT
{
subpassMergeFeedback = subpassMergeFeedback_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT*>( this );
}
operator VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT*>( this );
}
operator VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT*>( this );
}
operator VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, subpassMergeFeedback );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceSubpassMergeFeedbackFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceSubpassMergeFeedbackFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( subpassMergeFeedback == rhs.subpassMergeFeedback );
#endif
}
bool operator!=( PhysicalDeviceSubpassMergeFeedbackFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubpassMergeFeedbackFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 subpassMergeFeedback = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceSubpassMergeFeedbackFeaturesEXT>
{
using Type = PhysicalDeviceSubpassMergeFeedbackFeaturesEXT;
};
// wrapper struct for struct VkPhysicalDeviceSubpassShadingFeaturesHUAWEI, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceSubpassShadingFeaturesHUAWEI.html
struct PhysicalDeviceSubpassShadingFeaturesHUAWEI
{
using NativeType = VkPhysicalDeviceSubpassShadingFeaturesHUAWEI;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubpassShadingFeaturesHUAWEI;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceSubpassShadingFeaturesHUAWEI(VULKAN_HPP_NAMESPACE::Bool32 subpassShading_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, subpassShading{ subpassShading_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceSubpassShadingFeaturesHUAWEI( PhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceSubpassShadingFeaturesHUAWEI( VkPhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceSubpassShadingFeaturesHUAWEI( *reinterpret_cast<PhysicalDeviceSubpassShadingFeaturesHUAWEI const *>( &rhs ) )
{}
PhysicalDeviceSubpassShadingFeaturesHUAWEI & operator=( PhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceSubpassShadingFeaturesHUAWEI & operator=( VkPhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingFeaturesHUAWEI const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubpassShadingFeaturesHUAWEI & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubpassShadingFeaturesHUAWEI & setSubpassShading( VULKAN_HPP_NAMESPACE::Bool32 subpassShading_ ) VULKAN_HPP_NOEXCEPT
{
subpassShading = subpassShading_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceSubpassShadingFeaturesHUAWEI const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceSubpassShadingFeaturesHUAWEI*>( this );
}
operator VkPhysicalDeviceSubpassShadingFeaturesHUAWEI &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceSubpassShadingFeaturesHUAWEI*>( this );
}
operator VkPhysicalDeviceSubpassShadingFeaturesHUAWEI const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceSubpassShadingFeaturesHUAWEI*>( this );
}
operator VkPhysicalDeviceSubpassShadingFeaturesHUAWEI *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceSubpassShadingFeaturesHUAWEI*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, subpassShading );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceSubpassShadingFeaturesHUAWEI const & ) const = default;
#else
bool operator==( PhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( subpassShading == rhs.subpassShading );
#endif
}
bool operator!=( PhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubpassShadingFeaturesHUAWEI;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 subpassShading = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceSubpassShadingFeaturesHUAWEI>
{
using Type = PhysicalDeviceSubpassShadingFeaturesHUAWEI;
};
// wrapper struct for struct VkPhysicalDeviceSubpassShadingPropertiesHUAWEI, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceSubpassShadingPropertiesHUAWEI.html
struct PhysicalDeviceSubpassShadingPropertiesHUAWEI
{
using NativeType = VkPhysicalDeviceSubpassShadingPropertiesHUAWEI;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubpassShadingPropertiesHUAWEI;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceSubpassShadingPropertiesHUAWEI(uint32_t maxSubpassShadingWorkgroupSizeAspectRatio_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, maxSubpassShadingWorkgroupSizeAspectRatio{ maxSubpassShadingWorkgroupSizeAspectRatio_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceSubpassShadingPropertiesHUAWEI( PhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceSubpassShadingPropertiesHUAWEI( VkPhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceSubpassShadingPropertiesHUAWEI( *reinterpret_cast<PhysicalDeviceSubpassShadingPropertiesHUAWEI const *>( &rhs ) )
{}
PhysicalDeviceSubpassShadingPropertiesHUAWEI & operator=( PhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceSubpassShadingPropertiesHUAWEI & operator=( VkPhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingPropertiesHUAWEI const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceSubpassShadingPropertiesHUAWEI const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceSubpassShadingPropertiesHUAWEI*>( this );
}
operator VkPhysicalDeviceSubpassShadingPropertiesHUAWEI &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceSubpassShadingPropertiesHUAWEI*>( this );
}
operator VkPhysicalDeviceSubpassShadingPropertiesHUAWEI const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceSubpassShadingPropertiesHUAWEI*>( this );
}
operator VkPhysicalDeviceSubpassShadingPropertiesHUAWEI *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceSubpassShadingPropertiesHUAWEI*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, maxSubpassShadingWorkgroupSizeAspectRatio );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceSubpassShadingPropertiesHUAWEI const & ) const = default;
#else
bool operator==( PhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( maxSubpassShadingWorkgroupSizeAspectRatio == rhs.maxSubpassShadingWorkgroupSizeAspectRatio );
#endif
}
bool operator!=( PhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubpassShadingPropertiesHUAWEI;
void * pNext = {};
uint32_t maxSubpassShadingWorkgroupSizeAspectRatio = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceSubpassShadingPropertiesHUAWEI>
{
using Type = PhysicalDeviceSubpassShadingPropertiesHUAWEI;
};
// wrapper struct for struct VkPhysicalDeviceSurfaceInfo2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceSurfaceInfo2KHR.html
struct PhysicalDeviceSurfaceInfo2KHR
{
using NativeType = VkPhysicalDeviceSurfaceInfo2KHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSurfaceInfo2KHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceSurfaceInfo2KHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, surface{ surface_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceSurfaceInfo2KHR( PhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceSurfaceInfo2KHR( VkPhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceSurfaceInfo2KHR( *reinterpret_cast<PhysicalDeviceSurfaceInfo2KHR const *>( &rhs ) )
{}
PhysicalDeviceSurfaceInfo2KHR & operator=( PhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceSurfaceInfo2KHR & operator=( VkPhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSurfaceInfo2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSurfaceInfo2KHR & setSurface( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ ) VULKAN_HPP_NOEXCEPT
{
surface = surface_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceSurfaceInfo2KHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( this );
}
operator VkPhysicalDeviceSurfaceInfo2KHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceSurfaceInfo2KHR*>( this );
}
operator VkPhysicalDeviceSurfaceInfo2KHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( this );
}
operator VkPhysicalDeviceSurfaceInfo2KHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceSurfaceInfo2KHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SurfaceKHR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, surface );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceSurfaceInfo2KHR const & ) const = default;
#else
bool operator==( PhysicalDeviceSurfaceInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( surface == rhs.surface );
#endif
}
bool operator!=( PhysicalDeviceSurfaceInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSurfaceInfo2KHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::SurfaceKHR surface = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceSurfaceInfo2KHR>
{
using Type = PhysicalDeviceSurfaceInfo2KHR;
};
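  // Usage sketch (illustrative comment only): this info struct is the input to the
  // VK_KHR_get_surface_capabilities2 queries. Assumes a valid vk::PhysicalDevice `physicalDevice`,
  // a valid vk::SurfaceKHR `surface`, and the extension enabled on the instance.
  //
  //   vk::PhysicalDeviceSurfaceInfo2KHR surfaceInfo{ surface };
  //   vk::SurfaceCapabilities2KHR caps = physicalDevice.getSurfaceCapabilities2KHR( surfaceInfo );
  //   uint32_t minImages = caps.surfaceCapabilities.minImageCount;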
// wrapper struct for struct VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT.html
struct PhysicalDeviceSwapchainMaintenance1FeaturesEXT
{
using NativeType = VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSwapchainMaintenance1FeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceSwapchainMaintenance1FeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 swapchainMaintenance1_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, swapchainMaintenance1{ swapchainMaintenance1_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceSwapchainMaintenance1FeaturesEXT( PhysicalDeviceSwapchainMaintenance1FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceSwapchainMaintenance1FeaturesEXT( VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceSwapchainMaintenance1FeaturesEXT( *reinterpret_cast<PhysicalDeviceSwapchainMaintenance1FeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceSwapchainMaintenance1FeaturesEXT & operator=( PhysicalDeviceSwapchainMaintenance1FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceSwapchainMaintenance1FeaturesEXT & operator=( VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSwapchainMaintenance1FeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSwapchainMaintenance1FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSwapchainMaintenance1FeaturesEXT & setSwapchainMaintenance1( VULKAN_HPP_NAMESPACE::Bool32 swapchainMaintenance1_ ) VULKAN_HPP_NOEXCEPT
{
swapchainMaintenance1 = swapchainMaintenance1_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT*>( this );
}
operator VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT*>( this );
}
operator VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT*>( this );
}
operator VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, swapchainMaintenance1 );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceSwapchainMaintenance1FeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceSwapchainMaintenance1FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( swapchainMaintenance1 == rhs.swapchainMaintenance1 );
#endif
}
bool operator!=( PhysicalDeviceSwapchainMaintenance1FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSwapchainMaintenance1FeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 swapchainMaintenance1 = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceSwapchainMaintenance1FeaturesEXT>
{
using Type = PhysicalDeviceSwapchainMaintenance1FeaturesEXT;
};
// wrapper struct for struct VkPhysicalDeviceSynchronization2Features, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceSynchronization2Features.html
struct PhysicalDeviceSynchronization2Features
{
using NativeType = VkPhysicalDeviceSynchronization2Features;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSynchronization2Features;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceSynchronization2Features(VULKAN_HPP_NAMESPACE::Bool32 synchronization2_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, synchronization2{ synchronization2_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceSynchronization2Features( PhysicalDeviceSynchronization2Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceSynchronization2Features( VkPhysicalDeviceSynchronization2Features const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceSynchronization2Features( *reinterpret_cast<PhysicalDeviceSynchronization2Features const *>( &rhs ) )
{}
PhysicalDeviceSynchronization2Features & operator=( PhysicalDeviceSynchronization2Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceSynchronization2Features & operator=( VkPhysicalDeviceSynchronization2Features const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSynchronization2Features const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSynchronization2Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSynchronization2Features & setSynchronization2( VULKAN_HPP_NAMESPACE::Bool32 synchronization2_ ) VULKAN_HPP_NOEXCEPT
{
synchronization2 = synchronization2_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceSynchronization2Features const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceSynchronization2Features*>( this );
}
operator VkPhysicalDeviceSynchronization2Features &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceSynchronization2Features*>( this );
}
operator VkPhysicalDeviceSynchronization2Features const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceSynchronization2Features*>( this );
}
operator VkPhysicalDeviceSynchronization2Features *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceSynchronization2Features*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, synchronization2 );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceSynchronization2Features const & ) const = default;
#else
bool operator==( PhysicalDeviceSynchronization2Features const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( synchronization2 == rhs.synchronization2 );
#endif
}
bool operator!=( PhysicalDeviceSynchronization2Features const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSynchronization2Features;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 synchronization2 = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceSynchronization2Features>
{
using Type = PhysicalDeviceSynchronization2Features;
};
using PhysicalDeviceSynchronization2FeaturesKHR = PhysicalDeviceSynchronization2Features;
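  // Usage sketch (illustrative comment only): feature structs also serve as inputs at device creation —
  // chain one with the desired members set to VK_TRUE into DeviceCreateInfo::pNext to enable the
  // feature. Assumes `physicalDevice` reports synchronization2 support and `queueCreateInfo` is a
  // valid vk::DeviceQueueCreateInfo.
  //
  //   vk::PhysicalDeviceSynchronization2Features sync2Features{ VK_TRUE };
  //   vk::DeviceCreateInfo deviceCreateInfo{};
  //   deviceCreateInfo.setQueueCreateInfos( queueCreateInfo ).setPNext( &sync2Features );
  //   vk::Device device = physicalDevice.createDevice( deviceCreateInfo );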
// wrapper struct for struct VkPhysicalDeviceTensorFeaturesARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceTensorFeaturesARM.html
struct PhysicalDeviceTensorFeaturesARM
{
using NativeType = VkPhysicalDeviceTensorFeaturesARM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTensorFeaturesARM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceTensorFeaturesARM(VULKAN_HPP_NAMESPACE::Bool32 tensorNonPacked_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderTensorAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTensorArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTensorArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTensorUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 tensors_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, tensorNonPacked{ tensorNonPacked_ }, shaderTensorAccess{ shaderTensorAccess_ }, shaderStorageTensorArrayDynamicIndexing{ shaderStorageTensorArrayDynamicIndexing_ }, shaderStorageTensorArrayNonUniformIndexing{ shaderStorageTensorArrayNonUniformIndexing_ }, descriptorBindingStorageTensorUpdateAfterBind{ descriptorBindingStorageTensorUpdateAfterBind_ }, tensors{ tensors_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceTensorFeaturesARM( PhysicalDeviceTensorFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceTensorFeaturesARM( VkPhysicalDeviceTensorFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceTensorFeaturesARM( *reinterpret_cast<PhysicalDeviceTensorFeaturesARM const *>( &rhs ) )
{}
PhysicalDeviceTensorFeaturesARM & operator=( PhysicalDeviceTensorFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceTensorFeaturesARM & operator=( VkPhysicalDeviceTensorFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTensorFeaturesARM const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTensorFeaturesARM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTensorFeaturesARM & setTensorNonPacked( VULKAN_HPP_NAMESPACE::Bool32 tensorNonPacked_ ) VULKAN_HPP_NOEXCEPT
{
tensorNonPacked = tensorNonPacked_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTensorFeaturesARM & setShaderTensorAccess( VULKAN_HPP_NAMESPACE::Bool32 shaderTensorAccess_ ) VULKAN_HPP_NOEXCEPT
{
shaderTensorAccess = shaderTensorAccess_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTensorFeaturesARM & setShaderStorageTensorArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTensorArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderStorageTensorArrayDynamicIndexing = shaderStorageTensorArrayDynamicIndexing_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTensorFeaturesARM & setShaderStorageTensorArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTensorArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderStorageTensorArrayNonUniformIndexing = shaderStorageTensorArrayNonUniformIndexing_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTensorFeaturesARM & setDescriptorBindingStorageTensorUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTensorUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBindingStorageTensorUpdateAfterBind = descriptorBindingStorageTensorUpdateAfterBind_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTensorFeaturesARM & setTensors( VULKAN_HPP_NAMESPACE::Bool32 tensors_ ) VULKAN_HPP_NOEXCEPT
{
tensors = tensors_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceTensorFeaturesARM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceTensorFeaturesARM*>( this );
}
operator VkPhysicalDeviceTensorFeaturesARM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceTensorFeaturesARM*>( this );
}
operator VkPhysicalDeviceTensorFeaturesARM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceTensorFeaturesARM*>( this );
}
operator VkPhysicalDeviceTensorFeaturesARM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceTensorFeaturesARM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, tensorNonPacked, shaderTensorAccess, shaderStorageTensorArrayDynamicIndexing, shaderStorageTensorArrayNonUniformIndexing, descriptorBindingStorageTensorUpdateAfterBind, tensors );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceTensorFeaturesARM const & ) const = default;
#else
bool operator==( PhysicalDeviceTensorFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( tensorNonPacked == rhs.tensorNonPacked )
&& ( shaderTensorAccess == rhs.shaderTensorAccess )
&& ( shaderStorageTensorArrayDynamicIndexing == rhs.shaderStorageTensorArrayDynamicIndexing )
&& ( shaderStorageTensorArrayNonUniformIndexing == rhs.shaderStorageTensorArrayNonUniformIndexing )
&& ( descriptorBindingStorageTensorUpdateAfterBind == rhs.descriptorBindingStorageTensorUpdateAfterBind )
&& ( tensors == rhs.tensors );
#endif
}
bool operator!=( PhysicalDeviceTensorFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTensorFeaturesARM;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 tensorNonPacked = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderTensorAccess = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTensorArrayDynamicIndexing = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTensorArrayNonUniformIndexing = {};
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTensorUpdateAfterBind = {};
VULKAN_HPP_NAMESPACE::Bool32 tensors = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceTensorFeaturesARM>
{
using Type = PhysicalDeviceTensorFeaturesARM;
};
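  // Usage sketch (illustrative comment only): several extension feature structs can be linked into a
  // single pNext chain and queried together. Assumes a valid vk::PhysicalDevice `physicalDevice` with
  // VK_ARM_tensors available; pairing it with the swapchain-maintenance struct here is arbitrary.
  //
  //   vk::PhysicalDeviceTensorFeaturesARM tensorFeatures;
  //   vk::PhysicalDeviceSwapchainMaintenance1FeaturesEXT swapchainFeatures;
  //   tensorFeatures.pNext = &swapchainFeatures;             // second link in the chain
  //   vk::PhysicalDeviceFeatures2 features2{ {}, &tensorFeatures };
  //   physicalDevice.getFeatures2( &features2 );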
// wrapper struct for struct VkPhysicalDeviceTensorPropertiesARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceTensorPropertiesARM.html
struct PhysicalDeviceTensorPropertiesARM
{
using NativeType = VkPhysicalDeviceTensorPropertiesARM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTensorPropertiesARM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceTensorPropertiesARM(uint32_t maxTensorDimensionCount_ = {}, uint64_t maxTensorElements_ = {}, uint64_t maxPerDimensionTensorElements_ = {}, int64_t maxTensorStride_ = {}, uint64_t maxTensorSize_ = {}, uint32_t maxTensorShaderAccessArrayLength_ = {}, uint32_t maxTensorShaderAccessSize_ = {}, uint32_t maxDescriptorSetStorageTensors_ = {}, uint32_t maxPerStageDescriptorSetStorageTensors_ = {}, uint32_t maxDescriptorSetUpdateAfterBindStorageTensors_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindStorageTensors_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTensorArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderTensorSupportedStages_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, maxTensorDimensionCount{ maxTensorDimensionCount_ }, maxTensorElements{ maxTensorElements_ }, maxPerDimensionTensorElements{ maxPerDimensionTensorElements_ }, maxTensorStride{ maxTensorStride_ }, maxTensorSize{ maxTensorSize_ }, maxTensorShaderAccessArrayLength{ maxTensorShaderAccessArrayLength_ }, maxTensorShaderAccessSize{ maxTensorShaderAccessSize_ }, maxDescriptorSetStorageTensors{ maxDescriptorSetStorageTensors_ }, maxPerStageDescriptorSetStorageTensors{ maxPerStageDescriptorSetStorageTensors_ }, maxDescriptorSetUpdateAfterBindStorageTensors{ maxDescriptorSetUpdateAfterBindStorageTensors_ }, maxPerStageDescriptorUpdateAfterBindStorageTensors{ maxPerStageDescriptorUpdateAfterBindStorageTensors_ }, shaderStorageTensorArrayNonUniformIndexingNative{ shaderStorageTensorArrayNonUniformIndexingNative_ }, shaderTensorSupportedStages{ shaderTensorSupportedStages_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceTensorPropertiesARM( PhysicalDeviceTensorPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceTensorPropertiesARM( VkPhysicalDeviceTensorPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceTensorPropertiesARM( *reinterpret_cast<PhysicalDeviceTensorPropertiesARM const *>( &rhs ) )
{}
PhysicalDeviceTensorPropertiesARM & operator=( PhysicalDeviceTensorPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceTensorPropertiesARM & operator=( VkPhysicalDeviceTensorPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTensorPropertiesARM const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceTensorPropertiesARM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceTensorPropertiesARM*>( this );
}
operator VkPhysicalDeviceTensorPropertiesARM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceTensorPropertiesARM*>( this );
}
operator VkPhysicalDeviceTensorPropertiesARM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceTensorPropertiesARM*>( this );
}
operator VkPhysicalDeviceTensorPropertiesARM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceTensorPropertiesARM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint64_t const &, uint64_t const &, int64_t const &, uint64_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::ShaderStageFlags const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, maxTensorDimensionCount, maxTensorElements, maxPerDimensionTensorElements, maxTensorStride, maxTensorSize, maxTensorShaderAccessArrayLength, maxTensorShaderAccessSize, maxDescriptorSetStorageTensors, maxPerStageDescriptorSetStorageTensors, maxDescriptorSetUpdateAfterBindStorageTensors, maxPerStageDescriptorUpdateAfterBindStorageTensors, shaderStorageTensorArrayNonUniformIndexingNative, shaderTensorSupportedStages );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceTensorPropertiesARM const & ) const = default;
#else
bool operator==( PhysicalDeviceTensorPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( maxTensorDimensionCount == rhs.maxTensorDimensionCount )
&& ( maxTensorElements == rhs.maxTensorElements )
&& ( maxPerDimensionTensorElements == rhs.maxPerDimensionTensorElements )
&& ( maxTensorStride == rhs.maxTensorStride )
&& ( maxTensorSize == rhs.maxTensorSize )
&& ( maxTensorShaderAccessArrayLength == rhs.maxTensorShaderAccessArrayLength )
&& ( maxTensorShaderAccessSize == rhs.maxTensorShaderAccessSize )
&& ( maxDescriptorSetStorageTensors == rhs.maxDescriptorSetStorageTensors )
&& ( maxPerStageDescriptorSetStorageTensors == rhs.maxPerStageDescriptorSetStorageTensors )
&& ( maxDescriptorSetUpdateAfterBindStorageTensors == rhs.maxDescriptorSetUpdateAfterBindStorageTensors )
&& ( maxPerStageDescriptorUpdateAfterBindStorageTensors == rhs.maxPerStageDescriptorUpdateAfterBindStorageTensors )
&& ( shaderStorageTensorArrayNonUniformIndexingNative == rhs.shaderStorageTensorArrayNonUniformIndexingNative )
&& ( shaderTensorSupportedStages == rhs.shaderTensorSupportedStages );
#endif
}
bool operator!=( PhysicalDeviceTensorPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTensorPropertiesARM;
void * pNext = {};
uint32_t maxTensorDimensionCount = {};
uint64_t maxTensorElements = {};
uint64_t maxPerDimensionTensorElements = {};
int64_t maxTensorStride = {};
uint64_t maxTensorSize = {};
uint32_t maxTensorShaderAccessArrayLength = {};
uint32_t maxTensorShaderAccessSize = {};
uint32_t maxDescriptorSetStorageTensors = {};
uint32_t maxPerStageDescriptorSetStorageTensors = {};
uint32_t maxDescriptorSetUpdateAfterBindStorageTensors = {};
uint32_t maxPerStageDescriptorUpdateAfterBindStorageTensors = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTensorArrayNonUniformIndexingNative = {};
VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderTensorSupportedStages = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceTensorPropertiesARM>
{
using Type = PhysicalDeviceTensorPropertiesARM;
};
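// Usage sketch (illustrative): in the enhanced-mode API, a StructureChain retrieves these limits
// together with the core properties in a single call; 'physicalDevice' is an assumed, valid
// vk::PhysicalDevice.
//
//   auto chain = physicalDevice.getProperties2<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2,
//                                              VULKAN_HPP_NAMESPACE::PhysicalDeviceTensorPropertiesARM>();
//   auto const & tensorProps = chain.get<VULKAN_HPP_NAMESPACE::PhysicalDeviceTensorPropertiesARM>();
//   uint32_t maxDims = tensorProps.maxTensorDimensionCount;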
// wrapper struct for struct VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT.html
struct PhysicalDeviceTexelBufferAlignmentFeaturesEXT
{
using NativeType = VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, texelBufferAlignment{ texelBufferAlignment_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentFeaturesEXT( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceTexelBufferAlignmentFeaturesEXT( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceTexelBufferAlignmentFeaturesEXT( *reinterpret_cast<PhysicalDeviceTexelBufferAlignmentFeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceTexelBufferAlignmentFeaturesEXT & operator=( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceTexelBufferAlignmentFeaturesEXT & operator=( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTexelBufferAlignmentFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTexelBufferAlignmentFeaturesEXT & setTexelBufferAlignment( VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment_ ) VULKAN_HPP_NOEXCEPT
{
texelBufferAlignment = texelBufferAlignment_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT*>( this );
}
operator VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT*>( this );
}
operator VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT*>( this );
}
operator VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, texelBufferAlignment );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( texelBufferAlignment == rhs.texelBufferAlignment );
#endif
}
bool operator!=( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT>
{
using Type = PhysicalDeviceTexelBufferAlignmentFeaturesEXT;
};
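// Usage sketch (illustrative): the chaining setters make it convenient to build the struct inline
// when enabling the feature at device creation; 'queueCreateInfo' is an assumed, previously filled
// vk::DeviceQueueCreateInfo.
//
//   auto texelAlignmentFeatures =
//       VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT{}.setTexelBufferAlignment( VK_TRUE );
//   VULKAN_HPP_NAMESPACE::DeviceCreateInfo deviceCreateInfo{};
//   deviceCreateInfo.setQueueCreateInfos( queueCreateInfo ).setPNext( &texelAlignmentFeatures );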
// wrapper struct for struct VkPhysicalDeviceTexelBufferAlignmentProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceTexelBufferAlignmentProperties.html
struct PhysicalDeviceTexelBufferAlignmentProperties
{
using NativeType = VkPhysicalDeviceTexelBufferAlignmentProperties;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTexelBufferAlignmentProperties;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentProperties(VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes_ = {}, VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, storageTexelBufferOffsetAlignmentBytes{ storageTexelBufferOffsetAlignmentBytes_ }, storageTexelBufferOffsetSingleTexelAlignment{ storageTexelBufferOffsetSingleTexelAlignment_ }, uniformTexelBufferOffsetAlignmentBytes{ uniformTexelBufferOffsetAlignmentBytes_ }, uniformTexelBufferOffsetSingleTexelAlignment{ uniformTexelBufferOffsetSingleTexelAlignment_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentProperties( PhysicalDeviceTexelBufferAlignmentProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceTexelBufferAlignmentProperties( VkPhysicalDeviceTexelBufferAlignmentProperties const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceTexelBufferAlignmentProperties( *reinterpret_cast<PhysicalDeviceTexelBufferAlignmentProperties const *>( &rhs ) )
{}
PhysicalDeviceTexelBufferAlignmentProperties & operator=( PhysicalDeviceTexelBufferAlignmentProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceTexelBufferAlignmentProperties & operator=( VkPhysicalDeviceTexelBufferAlignmentProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentProperties const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceTexelBufferAlignmentProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceTexelBufferAlignmentProperties*>( this );
}
operator VkPhysicalDeviceTexelBufferAlignmentProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceTexelBufferAlignmentProperties*>( this );
}
operator VkPhysicalDeviceTexelBufferAlignmentProperties const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceTexelBufferAlignmentProperties*>( this );
}
operator VkPhysicalDeviceTexelBufferAlignmentProperties *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceTexelBufferAlignmentProperties*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, storageTexelBufferOffsetAlignmentBytes, storageTexelBufferOffsetSingleTexelAlignment, uniformTexelBufferOffsetAlignmentBytes, uniformTexelBufferOffsetSingleTexelAlignment );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceTexelBufferAlignmentProperties const & ) const = default;
#else
bool operator==( PhysicalDeviceTexelBufferAlignmentProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( storageTexelBufferOffsetAlignmentBytes == rhs.storageTexelBufferOffsetAlignmentBytes )
&& ( storageTexelBufferOffsetSingleTexelAlignment == rhs.storageTexelBufferOffsetSingleTexelAlignment )
&& ( uniformTexelBufferOffsetAlignmentBytes == rhs.uniformTexelBufferOffsetAlignmentBytes )
&& ( uniformTexelBufferOffsetSingleTexelAlignment == rhs.uniformTexelBufferOffsetSingleTexelAlignment );
#endif
}
bool operator!=( PhysicalDeviceTexelBufferAlignmentProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTexelBufferAlignmentProperties;
void * pNext = {};
VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes = {};
VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment = {};
VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes = {};
VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceTexelBufferAlignmentProperties>
{
using Type = PhysicalDeviceTexelBufferAlignmentProperties;
};
using PhysicalDeviceTexelBufferAlignmentPropertiesEXT = PhysicalDeviceTexelBufferAlignmentProperties;
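// Usage sketch (illustrative): rounding a buffer offset up to storageTexelBufferOffsetAlignmentBytes,
// assuming 'alignProps' was queried from the implementation and the reported alignment is a power
// of two (as Vulkan alignment values are).
//
//   VULKAN_HPP_NAMESPACE::DeviceSize align  = alignProps.storageTexelBufferOffsetAlignmentBytes;
//   VULKAN_HPP_NAMESPACE::DeviceSize offset = 1000;  // hypothetical unaligned offset
//   VULKAN_HPP_NAMESPACE::DeviceSize alignedOffset = ( offset + align - 1 ) & ~( align - 1 );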
// wrapper struct for struct VkPhysicalDeviceTextureCompressionASTCHDRFeatures, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceTextureCompressionASTCHDRFeatures.html
struct PhysicalDeviceTextureCompressionASTCHDRFeatures
{
using NativeType = VkPhysicalDeviceTextureCompressionASTCHDRFeatures;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeatures;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceTextureCompressionASTCHDRFeatures(VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, textureCompressionASTC_HDR{ textureCompressionASTC_HDR_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceTextureCompressionASTCHDRFeatures( PhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceTextureCompressionASTCHDRFeatures( VkPhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceTextureCompressionASTCHDRFeatures( *reinterpret_cast<PhysicalDeviceTextureCompressionASTCHDRFeatures const *>( &rhs ) )
{}
PhysicalDeviceTextureCompressionASTCHDRFeatures & operator=( PhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceTextureCompressionASTCHDRFeatures & operator=( VkPhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeatures const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTextureCompressionASTCHDRFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTextureCompressionASTCHDRFeatures & setTextureCompressionASTC_HDR( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ ) VULKAN_HPP_NOEXCEPT
{
textureCompressionASTC_HDR = textureCompressionASTC_HDR_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceTextureCompressionASTCHDRFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceTextureCompressionASTCHDRFeatures*>( this );
}
operator VkPhysicalDeviceTextureCompressionASTCHDRFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceTextureCompressionASTCHDRFeatures*>( this );
}
operator VkPhysicalDeviceTextureCompressionASTCHDRFeatures const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceTextureCompressionASTCHDRFeatures*>( this );
}
operator VkPhysicalDeviceTextureCompressionASTCHDRFeatures *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceTextureCompressionASTCHDRFeatures*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, textureCompressionASTC_HDR );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceTextureCompressionASTCHDRFeatures const & ) const = default;
#else
bool operator==( PhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( textureCompressionASTC_HDR == rhs.textureCompressionASTC_HDR );
#endif
}
bool operator!=( PhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeatures;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeatures>
{
using Type = PhysicalDeviceTextureCompressionASTCHDRFeatures;
};
using PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT = PhysicalDeviceTextureCompressionASTCHDRFeatures;
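// Interop sketch (illustrative): like every wrapper in this header, the struct is layout-compatible
// with its NativeType, so the conversion operators above let it pass directly to code that still
// uses the C API.
//
//   VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeatures astcHdrFeatures;
//   VkPhysicalDeviceTextureCompressionASTCHDRFeatures const & cFeatures  = astcHdrFeatures;  // no copy
//   VkPhysicalDeviceTextureCompressionASTCHDRFeatures *       pCFeatures = astcHdrFeatures;  // pointer form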
// wrapper struct for struct VkPhysicalDeviceTileMemoryHeapFeaturesQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceTileMemoryHeapFeaturesQCOM.html
struct PhysicalDeviceTileMemoryHeapFeaturesQCOM
{
using NativeType = VkPhysicalDeviceTileMemoryHeapFeaturesQCOM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTileMemoryHeapFeaturesQCOM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceTileMemoryHeapFeaturesQCOM(VULKAN_HPP_NAMESPACE::Bool32 tileMemoryHeap_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, tileMemoryHeap{ tileMemoryHeap_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceTileMemoryHeapFeaturesQCOM( PhysicalDeviceTileMemoryHeapFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceTileMemoryHeapFeaturesQCOM( VkPhysicalDeviceTileMemoryHeapFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceTileMemoryHeapFeaturesQCOM( *reinterpret_cast<PhysicalDeviceTileMemoryHeapFeaturesQCOM const *>( &rhs ) )
{}
PhysicalDeviceTileMemoryHeapFeaturesQCOM & operator=( PhysicalDeviceTileMemoryHeapFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceTileMemoryHeapFeaturesQCOM & operator=( VkPhysicalDeviceTileMemoryHeapFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTileMemoryHeapFeaturesQCOM const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTileMemoryHeapFeaturesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTileMemoryHeapFeaturesQCOM & setTileMemoryHeap( VULKAN_HPP_NAMESPACE::Bool32 tileMemoryHeap_ ) VULKAN_HPP_NOEXCEPT
{
tileMemoryHeap = tileMemoryHeap_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceTileMemoryHeapFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceTileMemoryHeapFeaturesQCOM*>( this );
}
operator VkPhysicalDeviceTileMemoryHeapFeaturesQCOM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceTileMemoryHeapFeaturesQCOM*>( this );
}
operator VkPhysicalDeviceTileMemoryHeapFeaturesQCOM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceTileMemoryHeapFeaturesQCOM*>( this );
}
operator VkPhysicalDeviceTileMemoryHeapFeaturesQCOM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceTileMemoryHeapFeaturesQCOM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, tileMemoryHeap );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceTileMemoryHeapFeaturesQCOM const & ) const = default;
#else
bool operator==( PhysicalDeviceTileMemoryHeapFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( tileMemoryHeap == rhs.tileMemoryHeap );
#endif
}
bool operator!=( PhysicalDeviceTileMemoryHeapFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTileMemoryHeapFeaturesQCOM;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 tileMemoryHeap = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceTileMemoryHeapFeaturesQCOM>
{
using Type = PhysicalDeviceTileMemoryHeapFeaturesQCOM;
};
// wrapper struct for struct VkPhysicalDeviceTileMemoryHeapPropertiesQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceTileMemoryHeapPropertiesQCOM.html
struct PhysicalDeviceTileMemoryHeapPropertiesQCOM
{
using NativeType = VkPhysicalDeviceTileMemoryHeapPropertiesQCOM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTileMemoryHeapPropertiesQCOM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceTileMemoryHeapPropertiesQCOM(VULKAN_HPP_NAMESPACE::Bool32 queueSubmitBoundary_ = {}, VULKAN_HPP_NAMESPACE::Bool32 tileBufferTransfers_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, queueSubmitBoundary{ queueSubmitBoundary_ }, tileBufferTransfers{ tileBufferTransfers_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceTileMemoryHeapPropertiesQCOM( PhysicalDeviceTileMemoryHeapPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceTileMemoryHeapPropertiesQCOM( VkPhysicalDeviceTileMemoryHeapPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceTileMemoryHeapPropertiesQCOM( *reinterpret_cast<PhysicalDeviceTileMemoryHeapPropertiesQCOM const *>( &rhs ) )
{}
PhysicalDeviceTileMemoryHeapPropertiesQCOM & operator=( PhysicalDeviceTileMemoryHeapPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceTileMemoryHeapPropertiesQCOM & operator=( VkPhysicalDeviceTileMemoryHeapPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTileMemoryHeapPropertiesQCOM const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTileMemoryHeapPropertiesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTileMemoryHeapPropertiesQCOM & setQueueSubmitBoundary( VULKAN_HPP_NAMESPACE::Bool32 queueSubmitBoundary_ ) VULKAN_HPP_NOEXCEPT
{
queueSubmitBoundary = queueSubmitBoundary_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTileMemoryHeapPropertiesQCOM & setTileBufferTransfers( VULKAN_HPP_NAMESPACE::Bool32 tileBufferTransfers_ ) VULKAN_HPP_NOEXCEPT
{
tileBufferTransfers = tileBufferTransfers_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceTileMemoryHeapPropertiesQCOM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceTileMemoryHeapPropertiesQCOM*>( this );
}
operator VkPhysicalDeviceTileMemoryHeapPropertiesQCOM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceTileMemoryHeapPropertiesQCOM*>( this );
}
operator VkPhysicalDeviceTileMemoryHeapPropertiesQCOM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceTileMemoryHeapPropertiesQCOM*>( this );
}
operator VkPhysicalDeviceTileMemoryHeapPropertiesQCOM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceTileMemoryHeapPropertiesQCOM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, queueSubmitBoundary, tileBufferTransfers );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceTileMemoryHeapPropertiesQCOM const & ) const = default;
#else
bool operator==( PhysicalDeviceTileMemoryHeapPropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( queueSubmitBoundary == rhs.queueSubmitBoundary )
&& ( tileBufferTransfers == rhs.tileBufferTransfers );
#endif
}
bool operator!=( PhysicalDeviceTileMemoryHeapPropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTileMemoryHeapPropertiesQCOM;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 queueSubmitBoundary = {};
VULKAN_HPP_NAMESPACE::Bool32 tileBufferTransfers = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceTileMemoryHeapPropertiesQCOM>
{
using Type = PhysicalDeviceTileMemoryHeapPropertiesQCOM;
};
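// Sketch (illustrative, assuming VULKAN_HPP_USE_REFLECT is defined and 'tileMemoryHeapProps' was
// queried): reflect() returns a tuple of references over all members, which operator== above uses
// for memberwise comparison; the tuple can also be read directly.
//
//   auto const & members = tileMemoryHeapProps.reflect();
//   VULKAN_HPP_NAMESPACE::Bool32 boundary = std::get<2>( members );  // queueSubmitBoundary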
// wrapper struct for struct VkPhysicalDeviceTilePropertiesFeaturesQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceTilePropertiesFeaturesQCOM.html
struct PhysicalDeviceTilePropertiesFeaturesQCOM
{
using NativeType = VkPhysicalDeviceTilePropertiesFeaturesQCOM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTilePropertiesFeaturesQCOM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceTilePropertiesFeaturesQCOM(VULKAN_HPP_NAMESPACE::Bool32 tileProperties_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, tileProperties{ tileProperties_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceTilePropertiesFeaturesQCOM( PhysicalDeviceTilePropertiesFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceTilePropertiesFeaturesQCOM( VkPhysicalDeviceTilePropertiesFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceTilePropertiesFeaturesQCOM( *reinterpret_cast<PhysicalDeviceTilePropertiesFeaturesQCOM const *>( &rhs ) )
{}
PhysicalDeviceTilePropertiesFeaturesQCOM & operator=( PhysicalDeviceTilePropertiesFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceTilePropertiesFeaturesQCOM & operator=( VkPhysicalDeviceTilePropertiesFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTilePropertiesFeaturesQCOM const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTilePropertiesFeaturesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTilePropertiesFeaturesQCOM & setTileProperties( VULKAN_HPP_NAMESPACE::Bool32 tileProperties_ ) VULKAN_HPP_NOEXCEPT
{
tileProperties = tileProperties_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceTilePropertiesFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceTilePropertiesFeaturesQCOM*>( this );
}
operator VkPhysicalDeviceTilePropertiesFeaturesQCOM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceTilePropertiesFeaturesQCOM*>( this );
}
operator VkPhysicalDeviceTilePropertiesFeaturesQCOM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceTilePropertiesFeaturesQCOM*>( this );
}
operator VkPhysicalDeviceTilePropertiesFeaturesQCOM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceTilePropertiesFeaturesQCOM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, tileProperties );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceTilePropertiesFeaturesQCOM const & ) const = default;
#else
bool operator==( PhysicalDeviceTilePropertiesFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( tileProperties == rhs.tileProperties );
#endif
}
bool operator!=( PhysicalDeviceTilePropertiesFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTilePropertiesFeaturesQCOM;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 tileProperties = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceTilePropertiesFeaturesQCOM>
{
using Type = PhysicalDeviceTilePropertiesFeaturesQCOM;
};
// wrapper struct for struct VkPhysicalDeviceTileShadingFeaturesQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceTileShadingFeaturesQCOM.html
struct PhysicalDeviceTileShadingFeaturesQCOM
{
using NativeType = VkPhysicalDeviceTileShadingFeaturesQCOM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTileShadingFeaturesQCOM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceTileShadingFeaturesQCOM(VULKAN_HPP_NAMESPACE::Bool32 tileShading_ = {}, VULKAN_HPP_NAMESPACE::Bool32 tileShadingFragmentStage_ = {}, VULKAN_HPP_NAMESPACE::Bool32 tileShadingColorAttachments_ = {}, VULKAN_HPP_NAMESPACE::Bool32 tileShadingDepthAttachments_ = {}, VULKAN_HPP_NAMESPACE::Bool32 tileShadingStencilAttachments_ = {}, VULKAN_HPP_NAMESPACE::Bool32 tileShadingInputAttachments_ = {}, VULKAN_HPP_NAMESPACE::Bool32 tileShadingSampledAttachments_ = {}, VULKAN_HPP_NAMESPACE::Bool32 tileShadingPerTileDraw_ = {}, VULKAN_HPP_NAMESPACE::Bool32 tileShadingPerTileDispatch_ = {}, VULKAN_HPP_NAMESPACE::Bool32 tileShadingDispatchTile_ = {}, VULKAN_HPP_NAMESPACE::Bool32 tileShadingApron_ = {}, VULKAN_HPP_NAMESPACE::Bool32 tileShadingAnisotropicApron_ = {}, VULKAN_HPP_NAMESPACE::Bool32 tileShadingAtomicOps_ = {}, VULKAN_HPP_NAMESPACE::Bool32 tileShadingImageProcessing_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, tileShading{ tileShading_ }, tileShadingFragmentStage{ tileShadingFragmentStage_ }, tileShadingColorAttachments{ tileShadingColorAttachments_ }, tileShadingDepthAttachments{ tileShadingDepthAttachments_ }, tileShadingStencilAttachments{ tileShadingStencilAttachments_ }, tileShadingInputAttachments{ tileShadingInputAttachments_ }, tileShadingSampledAttachments{ tileShadingSampledAttachments_ }, tileShadingPerTileDraw{ tileShadingPerTileDraw_ }, tileShadingPerTileDispatch{ tileShadingPerTileDispatch_ }, tileShadingDispatchTile{ tileShadingDispatchTile_ }, tileShadingApron{ tileShadingApron_ }, tileShadingAnisotropicApron{ tileShadingAnisotropicApron_ }, tileShadingAtomicOps{ tileShadingAtomicOps_ }, tileShadingImageProcessing{ tileShadingImageProcessing_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceTileShadingFeaturesQCOM( PhysicalDeviceTileShadingFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceTileShadingFeaturesQCOM( VkPhysicalDeviceTileShadingFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceTileShadingFeaturesQCOM( *reinterpret_cast<PhysicalDeviceTileShadingFeaturesQCOM const *>( &rhs ) )
{}
PhysicalDeviceTileShadingFeaturesQCOM & operator=( PhysicalDeviceTileShadingFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceTileShadingFeaturesQCOM & operator=( VkPhysicalDeviceTileShadingFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTileShadingFeaturesQCOM const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTileShadingFeaturesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTileShadingFeaturesQCOM & setTileShading( VULKAN_HPP_NAMESPACE::Bool32 tileShading_ ) VULKAN_HPP_NOEXCEPT
{
tileShading = tileShading_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTileShadingFeaturesQCOM & setTileShadingFragmentStage( VULKAN_HPP_NAMESPACE::Bool32 tileShadingFragmentStage_ ) VULKAN_HPP_NOEXCEPT
{
tileShadingFragmentStage = tileShadingFragmentStage_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTileShadingFeaturesQCOM & setTileShadingColorAttachments( VULKAN_HPP_NAMESPACE::Bool32 tileShadingColorAttachments_ ) VULKAN_HPP_NOEXCEPT
{
tileShadingColorAttachments = tileShadingColorAttachments_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTileShadingFeaturesQCOM & setTileShadingDepthAttachments( VULKAN_HPP_NAMESPACE::Bool32 tileShadingDepthAttachments_ ) VULKAN_HPP_NOEXCEPT
{
tileShadingDepthAttachments = tileShadingDepthAttachments_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTileShadingFeaturesQCOM & setTileShadingStencilAttachments( VULKAN_HPP_NAMESPACE::Bool32 tileShadingStencilAttachments_ ) VULKAN_HPP_NOEXCEPT
{
tileShadingStencilAttachments = tileShadingStencilAttachments_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTileShadingFeaturesQCOM & setTileShadingInputAttachments( VULKAN_HPP_NAMESPACE::Bool32 tileShadingInputAttachments_ ) VULKAN_HPP_NOEXCEPT
{
tileShadingInputAttachments = tileShadingInputAttachments_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTileShadingFeaturesQCOM & setTileShadingSampledAttachments( VULKAN_HPP_NAMESPACE::Bool32 tileShadingSampledAttachments_ ) VULKAN_HPP_NOEXCEPT
{
tileShadingSampledAttachments = tileShadingSampledAttachments_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTileShadingFeaturesQCOM & setTileShadingPerTileDraw( VULKAN_HPP_NAMESPACE::Bool32 tileShadingPerTileDraw_ ) VULKAN_HPP_NOEXCEPT
{
tileShadingPerTileDraw = tileShadingPerTileDraw_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTileShadingFeaturesQCOM & setTileShadingPerTileDispatch( VULKAN_HPP_NAMESPACE::Bool32 tileShadingPerTileDispatch_ ) VULKAN_HPP_NOEXCEPT
{
tileShadingPerTileDispatch = tileShadingPerTileDispatch_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTileShadingFeaturesQCOM & setTileShadingDispatchTile( VULKAN_HPP_NAMESPACE::Bool32 tileShadingDispatchTile_ ) VULKAN_HPP_NOEXCEPT
{
tileShadingDispatchTile = tileShadingDispatchTile_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTileShadingFeaturesQCOM & setTileShadingApron( VULKAN_HPP_NAMESPACE::Bool32 tileShadingApron_ ) VULKAN_HPP_NOEXCEPT
{
tileShadingApron = tileShadingApron_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTileShadingFeaturesQCOM & setTileShadingAnisotropicApron( VULKAN_HPP_NAMESPACE::Bool32 tileShadingAnisotropicApron_ ) VULKAN_HPP_NOEXCEPT
{
tileShadingAnisotropicApron = tileShadingAnisotropicApron_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTileShadingFeaturesQCOM & setTileShadingAtomicOps( VULKAN_HPP_NAMESPACE::Bool32 tileShadingAtomicOps_ ) VULKAN_HPP_NOEXCEPT
{
tileShadingAtomicOps = tileShadingAtomicOps_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTileShadingFeaturesQCOM & setTileShadingImageProcessing( VULKAN_HPP_NAMESPACE::Bool32 tileShadingImageProcessing_ ) VULKAN_HPP_NOEXCEPT
{
tileShadingImageProcessing = tileShadingImageProcessing_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceTileShadingFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceTileShadingFeaturesQCOM*>( this );
}
operator VkPhysicalDeviceTileShadingFeaturesQCOM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceTileShadingFeaturesQCOM*>( this );
}
operator VkPhysicalDeviceTileShadingFeaturesQCOM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceTileShadingFeaturesQCOM*>( this );
}
operator VkPhysicalDeviceTileShadingFeaturesQCOM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceTileShadingFeaturesQCOM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, tileShading, tileShadingFragmentStage, tileShadingColorAttachments, tileShadingDepthAttachments, tileShadingStencilAttachments, tileShadingInputAttachments, tileShadingSampledAttachments, tileShadingPerTileDraw, tileShadingPerTileDispatch, tileShadingDispatchTile, tileShadingApron, tileShadingAnisotropicApron, tileShadingAtomicOps, tileShadingImageProcessing );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceTileShadingFeaturesQCOM const & ) const = default;
#else
bool operator==( PhysicalDeviceTileShadingFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( tileShading == rhs.tileShading )
&& ( tileShadingFragmentStage == rhs.tileShadingFragmentStage )
&& ( tileShadingColorAttachments == rhs.tileShadingColorAttachments )
&& ( tileShadingDepthAttachments == rhs.tileShadingDepthAttachments )
&& ( tileShadingStencilAttachments == rhs.tileShadingStencilAttachments )
&& ( tileShadingInputAttachments == rhs.tileShadingInputAttachments )
&& ( tileShadingSampledAttachments == rhs.tileShadingSampledAttachments )
&& ( tileShadingPerTileDraw == rhs.tileShadingPerTileDraw )
&& ( tileShadingPerTileDispatch == rhs.tileShadingPerTileDispatch )
&& ( tileShadingDispatchTile == rhs.tileShadingDispatchTile )
&& ( tileShadingApron == rhs.tileShadingApron )
&& ( tileShadingAnisotropicApron == rhs.tileShadingAnisotropicApron )
&& ( tileShadingAtomicOps == rhs.tileShadingAtomicOps )
&& ( tileShadingImageProcessing == rhs.tileShadingImageProcessing );
#endif
}
bool operator!=( PhysicalDeviceTileShadingFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTileShadingFeaturesQCOM;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 tileShading = {};
VULKAN_HPP_NAMESPACE::Bool32 tileShadingFragmentStage = {};
VULKAN_HPP_NAMESPACE::Bool32 tileShadingColorAttachments = {};
VULKAN_HPP_NAMESPACE::Bool32 tileShadingDepthAttachments = {};
VULKAN_HPP_NAMESPACE::Bool32 tileShadingStencilAttachments = {};
VULKAN_HPP_NAMESPACE::Bool32 tileShadingInputAttachments = {};
VULKAN_HPP_NAMESPACE::Bool32 tileShadingSampledAttachments = {};
VULKAN_HPP_NAMESPACE::Bool32 tileShadingPerTileDraw = {};
VULKAN_HPP_NAMESPACE::Bool32 tileShadingPerTileDispatch = {};
VULKAN_HPP_NAMESPACE::Bool32 tileShadingDispatchTile = {};
VULKAN_HPP_NAMESPACE::Bool32 tileShadingApron = {};
VULKAN_HPP_NAMESPACE::Bool32 tileShadingAnisotropicApron = {};
VULKAN_HPP_NAMESPACE::Bool32 tileShadingAtomicOps = {};
VULKAN_HPP_NAMESPACE::Bool32 tileShadingImageProcessing = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceTileShadingFeaturesQCOM>
{
using Type = PhysicalDeviceTileShadingFeaturesQCOM;
};
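// Usage sketch (illustrative): a common pattern is to query the full feature set, then clear the
// capabilities the application does not need before passing the struct back at device creation;
// 'physicalDevice' is an assumed, valid vk::PhysicalDevice.
//
//   VULKAN_HPP_NAMESPACE::PhysicalDeviceTileShadingFeaturesQCOM tileShadingFeatures;
//   VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features2;
//   features2.pNext = &tileShadingFeatures;
//   physicalDevice.getFeatures2( &features2 );
//   tileShadingFeatures.setTileShadingAtomicOps( VK_FALSE )        // not used by this app
//                      .setTileShadingImageProcessing( VK_FALSE );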
// wrapper struct for struct VkPhysicalDeviceTileShadingPropertiesQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceTileShadingPropertiesQCOM.html
struct PhysicalDeviceTileShadingPropertiesQCOM
{
using NativeType = VkPhysicalDeviceTileShadingPropertiesQCOM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTileShadingPropertiesQCOM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceTileShadingPropertiesQCOM(uint32_t maxApronSize_ = {}, VULKAN_HPP_NAMESPACE::Bool32 preferNonCoherent_ = {}, VULKAN_HPP_NAMESPACE::Extent2D tileGranularity_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxTileShadingRate_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, maxApronSize{ maxApronSize_ }, preferNonCoherent{ preferNonCoherent_ }, tileGranularity{ tileGranularity_ }, maxTileShadingRate{ maxTileShadingRate_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceTileShadingPropertiesQCOM( PhysicalDeviceTileShadingPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceTileShadingPropertiesQCOM( VkPhysicalDeviceTileShadingPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceTileShadingPropertiesQCOM( *reinterpret_cast<PhysicalDeviceTileShadingPropertiesQCOM const *>( &rhs ) )
{}
PhysicalDeviceTileShadingPropertiesQCOM & operator=( PhysicalDeviceTileShadingPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceTileShadingPropertiesQCOM & operator=( VkPhysicalDeviceTileShadingPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTileShadingPropertiesQCOM const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceTileShadingPropertiesQCOM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceTileShadingPropertiesQCOM*>( this );
}
operator VkPhysicalDeviceTileShadingPropertiesQCOM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceTileShadingPropertiesQCOM*>( this );
}
operator VkPhysicalDeviceTileShadingPropertiesQCOM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceTileShadingPropertiesQCOM*>( this );
}
operator VkPhysicalDeviceTileShadingPropertiesQCOM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceTileShadingPropertiesQCOM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, maxApronSize, preferNonCoherent, tileGranularity, maxTileShadingRate );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceTileShadingPropertiesQCOM const & ) const = default;
#else
bool operator==( PhysicalDeviceTileShadingPropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( maxApronSize == rhs.maxApronSize )
&& ( preferNonCoherent == rhs.preferNonCoherent )
&& ( tileGranularity == rhs.tileGranularity )
&& ( maxTileShadingRate == rhs.maxTileShadingRate );
#endif
}
bool operator!=( PhysicalDeviceTileShadingPropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTileShadingPropertiesQCOM;
void * pNext = {};
uint32_t maxApronSize = {};
VULKAN_HPP_NAMESPACE::Bool32 preferNonCoherent = {};
VULKAN_HPP_NAMESPACE::Extent2D tileGranularity = {};
VULKAN_HPP_NAMESPACE::Extent2D maxTileShadingRate = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceTileShadingPropertiesQCOM>
{
using Type = PhysicalDeviceTileShadingPropertiesQCOM;
};
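// Usage sketch (illustrative, assuming tileGranularity is the tile extent in pixels and
// 'tileShadingProps' was queried): the number of tiles covering a framebuffer is a rounding-up
// division; 'framebufferWidth'/'framebufferHeight' are hypothetical dimensions.
//
//   uint32_t tilesX = ( framebufferWidth  + tileShadingProps.tileGranularity.width  - 1 ) / tileShadingProps.tileGranularity.width;
//   uint32_t tilesY = ( framebufferHeight + tileShadingProps.tileGranularity.height - 1 ) / tileShadingProps.tileGranularity.height;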
// wrapper struct for struct VkPhysicalDeviceTimelineSemaphoreFeatures, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceTimelineSemaphoreFeatures.html
struct PhysicalDeviceTimelineSemaphoreFeatures
{
using NativeType = VkPhysicalDeviceTimelineSemaphoreFeatures;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTimelineSemaphoreFeatures;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreFeatures(VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, timelineSemaphore{ timelineSemaphore_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreFeatures( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceTimelineSemaphoreFeatures( VkPhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceTimelineSemaphoreFeatures( *reinterpret_cast<PhysicalDeviceTimelineSemaphoreFeatures const *>( &rhs ) )
{}
PhysicalDeviceTimelineSemaphoreFeatures & operator=( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceTimelineSemaphoreFeatures & operator=( VkPhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeatures const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTimelineSemaphoreFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTimelineSemaphoreFeatures & setTimelineSemaphore( VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ ) VULKAN_HPP_NOEXCEPT
{
timelineSemaphore = timelineSemaphore_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceTimelineSemaphoreFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceTimelineSemaphoreFeatures*>( this );
}
operator VkPhysicalDeviceTimelineSemaphoreFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceTimelineSemaphoreFeatures*>( this );
}
operator VkPhysicalDeviceTimelineSemaphoreFeatures const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceTimelineSemaphoreFeatures*>( this );
}
operator VkPhysicalDeviceTimelineSemaphoreFeatures *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceTimelineSemaphoreFeatures*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, timelineSemaphore );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceTimelineSemaphoreFeatures const & ) const = default;
#else
bool operator==( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( timelineSemaphore == rhs.timelineSemaphore );
#endif
}
bool operator!=( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTimelineSemaphoreFeatures;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceTimelineSemaphoreFeatures>
{
using Type = PhysicalDeviceTimelineSemaphoreFeatures;
};
using PhysicalDeviceTimelineSemaphoreFeaturesKHR = PhysicalDeviceTimelineSemaphoreFeatures;
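// Usage sketch (illustrative): once the timelineSemaphore feature is confirmed and enabled, a
// timeline semaphore is created by chaining SemaphoreTypeCreateInfo into SemaphoreCreateInfo;
// 'device' is an assumed, valid vk::Device.
//
//   VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfo typeInfo{ VULKAN_HPP_NAMESPACE::SemaphoreType::eTimeline, 0 /*initialValue*/ };
//   VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo createInfo{ {}, &typeInfo };
//   VULKAN_HPP_NAMESPACE::Semaphore timeline = device.createSemaphore( createInfo );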
// wrapper struct for struct VkPhysicalDeviceTimelineSemaphoreProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceTimelineSemaphoreProperties.html
struct PhysicalDeviceTimelineSemaphoreProperties
{
using NativeType = VkPhysicalDeviceTimelineSemaphoreProperties;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTimelineSemaphoreProperties;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreProperties(uint64_t maxTimelineSemaphoreValueDifference_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, maxTimelineSemaphoreValueDifference{ maxTimelineSemaphoreValueDifference_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreProperties( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceTimelineSemaphoreProperties( VkPhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceTimelineSemaphoreProperties( *reinterpret_cast<PhysicalDeviceTimelineSemaphoreProperties const *>( &rhs ) )
{}
PhysicalDeviceTimelineSemaphoreProperties & operator=( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceTimelineSemaphoreProperties & operator=( VkPhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreProperties const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceTimelineSemaphoreProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceTimelineSemaphoreProperties*>( this );
}
operator VkPhysicalDeviceTimelineSemaphoreProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceTimelineSemaphoreProperties*>( this );
}
operator VkPhysicalDeviceTimelineSemaphoreProperties const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceTimelineSemaphoreProperties*>( this );
}
operator VkPhysicalDeviceTimelineSemaphoreProperties *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceTimelineSemaphoreProperties*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint64_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, maxTimelineSemaphoreValueDifference );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceTimelineSemaphoreProperties const & ) const = default;
#else
bool operator==( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( maxTimelineSemaphoreValueDifference == rhs.maxTimelineSemaphoreValueDifference );
#endif
}
bool operator!=( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTimelineSemaphoreProperties;
void * pNext = {};
uint64_t maxTimelineSemaphoreValueDifference = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceTimelineSemaphoreProperties>
{
using Type = PhysicalDeviceTimelineSemaphoreProperties;
};
using PhysicalDeviceTimelineSemaphorePropertiesKHR = PhysicalDeviceTimelineSemaphoreProperties;
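// Usage sketch (illustrative): maxTimelineSemaphoreValueDifference bounds how far a pending signal
// or wait value may differ from the semaphore's current value, so large jumps should be checked;
// 'timelineProps' is assumed queried, and 'current'/'target' are hypothetical counter values.
//
//   bool signalable = ( target - current ) <= timelineProps.maxTimelineSemaphoreValueDifference;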
// wrapper struct for struct VkPhysicalDeviceToolProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceToolProperties.html
struct PhysicalDeviceToolProperties
{
using NativeType = VkPhysicalDeviceToolProperties;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceToolProperties;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceToolProperties(std::array<char,VK_MAX_EXTENSION_NAME_SIZE> const & name_ = {}, std::array<char,VK_MAX_EXTENSION_NAME_SIZE> const & version_ = {}, VULKAN_HPP_NAMESPACE::ToolPurposeFlags purposes_ = {}, std::array<char,VK_MAX_DESCRIPTION_SIZE> const & description_ = {}, std::array<char,VK_MAX_EXTENSION_NAME_SIZE> const & layer_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, name{ name_ }, version{ version_ }, purposes{ purposes_ }, description{ description_ }, layer{ layer_ }
{}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceToolProperties( PhysicalDeviceToolProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceToolProperties( VkPhysicalDeviceToolProperties const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceToolProperties( *reinterpret_cast<PhysicalDeviceToolProperties const *>( &rhs ) )
{}
PhysicalDeviceToolProperties & operator=( PhysicalDeviceToolProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceToolProperties & operator=( VkPhysicalDeviceToolProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceToolProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceToolProperties*>( this );
}
operator VkPhysicalDeviceToolProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceToolProperties*>( this );
}
operator VkPhysicalDeviceToolProperties const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceToolProperties*>( this );
}
operator VkPhysicalDeviceToolProperties *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceToolProperties*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> const &, VULKAN_HPP_NAMESPACE::ToolPurposeFlags const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, name, version, purposes, description, layer );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
std::strong_ordering operator<=>( PhysicalDeviceToolProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
if ( auto cmp = strcmp( name, rhs.name ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
if ( auto cmp = strcmp( version, rhs.version ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
if ( auto cmp = purposes <=> rhs.purposes; cmp != 0 ) return cmp;
if ( auto cmp = strcmp( description, rhs.description ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
if ( auto cmp = strcmp( layer, rhs.layer ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
return std::strong_ordering::equivalent;
}
#endif
bool operator==( PhysicalDeviceToolProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( strcmp( name, rhs.name ) == 0 )
&& ( strcmp( version, rhs.version ) == 0 )
&& ( purposes == rhs.purposes )
&& ( strcmp( description, rhs.description ) == 0 )
&& ( strcmp( layer, rhs.layer ) == 0 );
}
bool operator!=( PhysicalDeviceToolProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceToolProperties;
void * pNext = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> name = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> version = {};
VULKAN_HPP_NAMESPACE::ToolPurposeFlags purposes = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> layer = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceToolProperties>
{
using Type = PhysicalDeviceToolProperties;
};
using PhysicalDeviceToolPropertiesEXT = PhysicalDeviceToolProperties;
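// Illustrative usage sketch (hand-written, not generated from the registry): tool
// properties are a read-only query, which is why this struct has no setters. A minimal
// enumeration, assuming exceptions and enhanced mode are enabled, <cstdio> is included,
// and "physicalDevice" is a hypothetical, valid vk::PhysicalDevice:
//
//   std::vector<vk::PhysicalDeviceToolProperties> tools = physicalDevice.getToolProperties();
//   for ( vk::PhysicalDeviceToolProperties const & tool : tools )
//   {
//     // ArrayWrapper1D<char, N> converts to char const *, so the members feed C string APIs directly
//     std::printf( "%s %s: %s\n", tool.name.data(), tool.version.data(), tool.description.data() );
//   }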
// wrapper struct for struct VkPhysicalDeviceTransformFeedbackFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceTransformFeedbackFeaturesEXT.html
struct PhysicalDeviceTransformFeedbackFeaturesEXT
{
using NativeType = VkPhysicalDeviceTransformFeedbackFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 transformFeedback_ = {}, VULKAN_HPP_NAMESPACE::Bool32 geometryStreams_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, transformFeedback{ transformFeedback_ }, geometryStreams{ geometryStreams_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackFeaturesEXT( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceTransformFeedbackFeaturesEXT( VkPhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceTransformFeedbackFeaturesEXT( *reinterpret_cast<PhysicalDeviceTransformFeedbackFeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceTransformFeedbackFeaturesEXT & operator=( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceTransformFeedbackFeaturesEXT & operator=( VkPhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTransformFeedbackFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTransformFeedbackFeaturesEXT & setTransformFeedback( VULKAN_HPP_NAMESPACE::Bool32 transformFeedback_ ) VULKAN_HPP_NOEXCEPT
{
transformFeedback = transformFeedback_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTransformFeedbackFeaturesEXT & setGeometryStreams( VULKAN_HPP_NAMESPACE::Bool32 geometryStreams_ ) VULKAN_HPP_NOEXCEPT
{
geometryStreams = geometryStreams_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceTransformFeedbackFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceTransformFeedbackFeaturesEXT*>( this );
}
operator VkPhysicalDeviceTransformFeedbackFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceTransformFeedbackFeaturesEXT*>( this );
}
operator VkPhysicalDeviceTransformFeedbackFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceTransformFeedbackFeaturesEXT*>( this );
}
operator VkPhysicalDeviceTransformFeedbackFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceTransformFeedbackFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, transformFeedback, geometryStreams );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceTransformFeedbackFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( transformFeedback == rhs.transformFeedback )
&& ( geometryStreams == rhs.geometryStreams );
#endif
}
bool operator!=( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 transformFeedback = {};
VULKAN_HPP_NAMESPACE::Bool32 geometryStreams = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT>
{
using Type = PhysicalDeviceTransformFeedbackFeaturesEXT;
};
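// Illustrative usage sketch (hand-written, not generated from the registry): feature
// structs like this one are typically chained behind vk::PhysicalDeviceFeatures2 to
// query support, assuming "physicalDevice" is a hypothetical, valid vk::PhysicalDevice:
//
//   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
//                                            vk::PhysicalDeviceTransformFeedbackFeaturesEXT>();
//   auto const & xfb = chain.get<vk::PhysicalDeviceTransformFeedbackFeaturesEXT>();
//   bool geometryStreamsUsable = xfb.transformFeedback && xfb.geometryStreams;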
// wrapper struct for struct VkPhysicalDeviceTransformFeedbackPropertiesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceTransformFeedbackPropertiesEXT.html
struct PhysicalDeviceTransformFeedbackPropertiesEXT
{
using NativeType = VkPhysicalDeviceTransformFeedbackPropertiesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackPropertiesEXT(uint32_t maxTransformFeedbackStreams_ = {}, uint32_t maxTransformFeedbackBuffers_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize maxTransformFeedbackBufferSize_ = {}, uint32_t maxTransformFeedbackStreamDataSize_ = {}, uint32_t maxTransformFeedbackBufferDataSize_ = {}, uint32_t maxTransformFeedbackBufferDataStride_ = {}, VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackQueries_ = {}, VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackStreamsLinesTriangles_ = {}, VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackRasterizationStreamSelect_ = {}, VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackDraw_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, maxTransformFeedbackStreams{ maxTransformFeedbackStreams_ }, maxTransformFeedbackBuffers{ maxTransformFeedbackBuffers_ }, maxTransformFeedbackBufferSize{ maxTransformFeedbackBufferSize_ }, maxTransformFeedbackStreamDataSize{ maxTransformFeedbackStreamDataSize_ }, maxTransformFeedbackBufferDataSize{ maxTransformFeedbackBufferDataSize_ }, maxTransformFeedbackBufferDataStride{ maxTransformFeedbackBufferDataStride_ }, transformFeedbackQueries{ transformFeedbackQueries_ }, transformFeedbackStreamsLinesTriangles{ transformFeedbackStreamsLinesTriangles_ }, transformFeedbackRasterizationStreamSelect{ transformFeedbackRasterizationStreamSelect_ }, transformFeedbackDraw{ transformFeedbackDraw_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackPropertiesEXT( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceTransformFeedbackPropertiesEXT( VkPhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceTransformFeedbackPropertiesEXT( *reinterpret_cast<PhysicalDeviceTransformFeedbackPropertiesEXT const *>( &rhs ) )
{}
PhysicalDeviceTransformFeedbackPropertiesEXT & operator=( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceTransformFeedbackPropertiesEXT & operator=( VkPhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackPropertiesEXT const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceTransformFeedbackPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceTransformFeedbackPropertiesEXT*>( this );
}
operator VkPhysicalDeviceTransformFeedbackPropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceTransformFeedbackPropertiesEXT*>( this );
}
operator VkPhysicalDeviceTransformFeedbackPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceTransformFeedbackPropertiesEXT*>( this );
}
operator VkPhysicalDeviceTransformFeedbackPropertiesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceTransformFeedbackPropertiesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, maxTransformFeedbackStreams, maxTransformFeedbackBuffers, maxTransformFeedbackBufferSize, maxTransformFeedbackStreamDataSize, maxTransformFeedbackBufferDataSize, maxTransformFeedbackBufferDataStride, transformFeedbackQueries, transformFeedbackStreamsLinesTriangles, transformFeedbackRasterizationStreamSelect, transformFeedbackDraw );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceTransformFeedbackPropertiesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( maxTransformFeedbackStreams == rhs.maxTransformFeedbackStreams )
&& ( maxTransformFeedbackBuffers == rhs.maxTransformFeedbackBuffers )
&& ( maxTransformFeedbackBufferSize == rhs.maxTransformFeedbackBufferSize )
&& ( maxTransformFeedbackStreamDataSize == rhs.maxTransformFeedbackStreamDataSize )
&& ( maxTransformFeedbackBufferDataSize == rhs.maxTransformFeedbackBufferDataSize )
&& ( maxTransformFeedbackBufferDataStride == rhs.maxTransformFeedbackBufferDataStride )
&& ( transformFeedbackQueries == rhs.transformFeedbackQueries )
&& ( transformFeedbackStreamsLinesTriangles == rhs.transformFeedbackStreamsLinesTriangles )
&& ( transformFeedbackRasterizationStreamSelect == rhs.transformFeedbackRasterizationStreamSelect )
&& ( transformFeedbackDraw == rhs.transformFeedbackDraw );
#endif
}
bool operator!=( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT;
void * pNext = {};
uint32_t maxTransformFeedbackStreams = {};
uint32_t maxTransformFeedbackBuffers = {};
VULKAN_HPP_NAMESPACE::DeviceSize maxTransformFeedbackBufferSize = {};
uint32_t maxTransformFeedbackStreamDataSize = {};
uint32_t maxTransformFeedbackBufferDataSize = {};
uint32_t maxTransformFeedbackBufferDataStride = {};
VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackQueries = {};
VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackStreamsLinesTriangles = {};
VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackRasterizationStreamSelect = {};
VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackDraw = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT>
{
using Type = PhysicalDeviceTransformFeedbackPropertiesEXT;
};
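// Illustrative usage sketch (hand-written, not generated from the registry): limits in
// properties structs are filled in by the implementation through
// vk::PhysicalDeviceProperties2, which is why no setters are generated here. Assuming
// a hypothetical, valid vk::PhysicalDevice "physicalDevice":
//
//   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
//                                              vk::PhysicalDeviceTransformFeedbackPropertiesEXT>();
//   auto const & props = chain.get<vk::PhysicalDeviceTransformFeedbackPropertiesEXT>();
//   uint32_t maxStreams = props.maxTransformFeedbackStreams;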
// wrapper struct for struct VkPhysicalDeviceUnifiedImageLayoutsFeaturesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceUnifiedImageLayoutsFeaturesKHR.html
struct PhysicalDeviceUnifiedImageLayoutsFeaturesKHR
{
using NativeType = VkPhysicalDeviceUnifiedImageLayoutsFeaturesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceUnifiedImageLayoutsFeaturesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceUnifiedImageLayoutsFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 unifiedImageLayouts_ = {}, VULKAN_HPP_NAMESPACE::Bool32 unifiedImageLayoutsVideo_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, unifiedImageLayouts{ unifiedImageLayouts_ }, unifiedImageLayoutsVideo{ unifiedImageLayoutsVideo_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceUnifiedImageLayoutsFeaturesKHR( PhysicalDeviceUnifiedImageLayoutsFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceUnifiedImageLayoutsFeaturesKHR( VkPhysicalDeviceUnifiedImageLayoutsFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceUnifiedImageLayoutsFeaturesKHR( *reinterpret_cast<PhysicalDeviceUnifiedImageLayoutsFeaturesKHR const *>( &rhs ) )
{}
PhysicalDeviceUnifiedImageLayoutsFeaturesKHR & operator=( PhysicalDeviceUnifiedImageLayoutsFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceUnifiedImageLayoutsFeaturesKHR & operator=( VkPhysicalDeviceUnifiedImageLayoutsFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceUnifiedImageLayoutsFeaturesKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceUnifiedImageLayoutsFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceUnifiedImageLayoutsFeaturesKHR & setUnifiedImageLayouts( VULKAN_HPP_NAMESPACE::Bool32 unifiedImageLayouts_ ) VULKAN_HPP_NOEXCEPT
{
unifiedImageLayouts = unifiedImageLayouts_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceUnifiedImageLayoutsFeaturesKHR & setUnifiedImageLayoutsVideo( VULKAN_HPP_NAMESPACE::Bool32 unifiedImageLayoutsVideo_ ) VULKAN_HPP_NOEXCEPT
{
unifiedImageLayoutsVideo = unifiedImageLayoutsVideo_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceUnifiedImageLayoutsFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceUnifiedImageLayoutsFeaturesKHR*>( this );
}
operator VkPhysicalDeviceUnifiedImageLayoutsFeaturesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceUnifiedImageLayoutsFeaturesKHR*>( this );
}
operator VkPhysicalDeviceUnifiedImageLayoutsFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceUnifiedImageLayoutsFeaturesKHR*>( this );
}
operator VkPhysicalDeviceUnifiedImageLayoutsFeaturesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceUnifiedImageLayoutsFeaturesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, unifiedImageLayouts, unifiedImageLayoutsVideo );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceUnifiedImageLayoutsFeaturesKHR const & ) const = default;
#else
bool operator==( PhysicalDeviceUnifiedImageLayoutsFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( unifiedImageLayouts == rhs.unifiedImageLayouts )
&& ( unifiedImageLayoutsVideo == rhs.unifiedImageLayoutsVideo );
#endif
}
bool operator!=( PhysicalDeviceUnifiedImageLayoutsFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceUnifiedImageLayoutsFeaturesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 unifiedImageLayouts = {};
VULKAN_HPP_NAMESPACE::Bool32 unifiedImageLayoutsVideo = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceUnifiedImageLayoutsFeaturesKHR>
{
using Type = PhysicalDeviceUnifiedImageLayoutsFeaturesKHR;
};
// wrapper struct for struct VkPhysicalDeviceUniformBufferStandardLayoutFeatures, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceUniformBufferStandardLayoutFeatures.html
struct PhysicalDeviceUniformBufferStandardLayoutFeatures
{
using NativeType = VkPhysicalDeviceUniformBufferStandardLayoutFeatures;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceUniformBufferStandardLayoutFeatures(VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, uniformBufferStandardLayout{ uniformBufferStandardLayout_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceUniformBufferStandardLayoutFeatures( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceUniformBufferStandardLayoutFeatures( VkPhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceUniformBufferStandardLayoutFeatures( *reinterpret_cast<PhysicalDeviceUniformBufferStandardLayoutFeatures const *>( &rhs ) )
{}
PhysicalDeviceUniformBufferStandardLayoutFeatures & operator=( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceUniformBufferStandardLayoutFeatures & operator=( VkPhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeatures const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceUniformBufferStandardLayoutFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceUniformBufferStandardLayoutFeatures & setUniformBufferStandardLayout( VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ ) VULKAN_HPP_NOEXCEPT
{
uniformBufferStandardLayout = uniformBufferStandardLayout_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceUniformBufferStandardLayoutFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceUniformBufferStandardLayoutFeatures*>( this );
}
operator VkPhysicalDeviceUniformBufferStandardLayoutFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceUniformBufferStandardLayoutFeatures*>( this );
}
operator VkPhysicalDeviceUniformBufferStandardLayoutFeatures const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceUniformBufferStandardLayoutFeatures*>( this );
}
operator VkPhysicalDeviceUniformBufferStandardLayoutFeatures *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceUniformBufferStandardLayoutFeatures*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, uniformBufferStandardLayout );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceUniformBufferStandardLayoutFeatures const & ) const = default;
#else
bool operator==( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( uniformBufferStandardLayout == rhs.uniformBufferStandardLayout );
#endif
}
bool operator!=( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures>
{
using Type = PhysicalDeviceUniformBufferStandardLayoutFeatures;
};
using PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR = PhysicalDeviceUniformBufferStandardLayoutFeatures;
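// Hand-written note: uniformBufferStandardLayout was promoted to core in Vulkan 1.2,
// so the same feature bit is also available as
// vk::PhysicalDeviceVulkan12Features::uniformBufferStandardLayout. Illustrative
// sketch, assuming a hypothetical, valid vk::PhysicalDevice "physicalDevice":
//
//   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
//                                            vk::PhysicalDeviceVulkan12Features>();
//   bool supported = chain.get<vk::PhysicalDeviceVulkan12Features>().uniformBufferStandardLayout;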
// wrapper struct for struct VkPhysicalDeviceVariablePointersFeatures, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVariablePointersFeatures.html
struct PhysicalDeviceVariablePointersFeatures
{
using NativeType = VkPhysicalDeviceVariablePointersFeatures;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVariablePointersFeatures;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceVariablePointersFeatures(VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ = {}, VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, variablePointersStorageBuffer{ variablePointersStorageBuffer_ }, variablePointers{ variablePointers_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceVariablePointersFeatures( PhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceVariablePointersFeatures( VkPhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceVariablePointersFeatures( *reinterpret_cast<PhysicalDeviceVariablePointersFeatures const *>( &rhs ) )
{}
PhysicalDeviceVariablePointersFeatures & operator=( PhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceVariablePointersFeatures & operator=( VkPhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeatures const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVariablePointersFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVariablePointersFeatures & setVariablePointersStorageBuffer( VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ ) VULKAN_HPP_NOEXCEPT
{
variablePointersStorageBuffer = variablePointersStorageBuffer_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVariablePointersFeatures & setVariablePointers( VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ ) VULKAN_HPP_NOEXCEPT
{
variablePointers = variablePointers_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceVariablePointersFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceVariablePointersFeatures*>( this );
}
operator VkPhysicalDeviceVariablePointersFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceVariablePointersFeatures*>( this );
}
operator VkPhysicalDeviceVariablePointersFeatures const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceVariablePointersFeatures*>( this );
}
operator VkPhysicalDeviceVariablePointersFeatures *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceVariablePointersFeatures*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, variablePointersStorageBuffer, variablePointers );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceVariablePointersFeatures const & ) const = default;
#else
bool operator==( PhysicalDeviceVariablePointersFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( variablePointersStorageBuffer == rhs.variablePointersStorageBuffer )
&& ( variablePointers == rhs.variablePointers );
#endif
}
bool operator!=( PhysicalDeviceVariablePointersFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVariablePointersFeatures;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer = {};
VULKAN_HPP_NAMESPACE::Bool32 variablePointers = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceVariablePointersFeatures>
{
using Type = PhysicalDeviceVariablePointersFeatures;
};
using PhysicalDeviceVariablePointerFeatures = PhysicalDeviceVariablePointersFeatures;
using PhysicalDeviceVariablePointerFeaturesKHR = PhysicalDeviceVariablePointersFeatures;
using PhysicalDeviceVariablePointersFeaturesKHR = PhysicalDeviceVariablePointersFeatures;
// wrapper struct for struct VkPhysicalDeviceVertexAttributeDivisorFeatures, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVertexAttributeDivisorFeatures.html
struct PhysicalDeviceVertexAttributeDivisorFeatures
{
using NativeType = VkPhysicalDeviceVertexAttributeDivisorFeatures;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVertexAttributeDivisorFeatures;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorFeatures(VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, vertexAttributeInstanceRateDivisor{ vertexAttributeInstanceRateDivisor_ }, vertexAttributeInstanceRateZeroDivisor{ vertexAttributeInstanceRateZeroDivisor_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorFeatures( PhysicalDeviceVertexAttributeDivisorFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceVertexAttributeDivisorFeatures( VkPhysicalDeviceVertexAttributeDivisorFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceVertexAttributeDivisorFeatures( *reinterpret_cast<PhysicalDeviceVertexAttributeDivisorFeatures const *>( &rhs ) )
{}
PhysicalDeviceVertexAttributeDivisorFeatures & operator=( PhysicalDeviceVertexAttributeDivisorFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceVertexAttributeDivisorFeatures & operator=( VkPhysicalDeviceVertexAttributeDivisorFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeatures const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexAttributeDivisorFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexAttributeDivisorFeatures & setVertexAttributeInstanceRateDivisor( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor_ ) VULKAN_HPP_NOEXCEPT
{
vertexAttributeInstanceRateDivisor = vertexAttributeInstanceRateDivisor_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexAttributeDivisorFeatures & setVertexAttributeInstanceRateZeroDivisor( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor_ ) VULKAN_HPP_NOEXCEPT
{
vertexAttributeInstanceRateZeroDivisor = vertexAttributeInstanceRateZeroDivisor_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceVertexAttributeDivisorFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceVertexAttributeDivisorFeatures*>( this );
}
operator VkPhysicalDeviceVertexAttributeDivisorFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceVertexAttributeDivisorFeatures*>( this );
}
operator VkPhysicalDeviceVertexAttributeDivisorFeatures const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceVertexAttributeDivisorFeatures*>( this );
}
operator VkPhysicalDeviceVertexAttributeDivisorFeatures *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceVertexAttributeDivisorFeatures*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, vertexAttributeInstanceRateDivisor, vertexAttributeInstanceRateZeroDivisor );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceVertexAttributeDivisorFeatures const & ) const = default;
#else
bool operator==( PhysicalDeviceVertexAttributeDivisorFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( vertexAttributeInstanceRateDivisor == rhs.vertexAttributeInstanceRateDivisor )
&& ( vertexAttributeInstanceRateZeroDivisor == rhs.vertexAttributeInstanceRateZeroDivisor );
#endif
}
bool operator!=( PhysicalDeviceVertexAttributeDivisorFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorFeatures;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor = {};
VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceVertexAttributeDivisorFeatures>
{
using Type = PhysicalDeviceVertexAttributeDivisorFeatures;
};
using PhysicalDeviceVertexAttributeDivisorFeaturesEXT = PhysicalDeviceVertexAttributeDivisorFeatures;
using PhysicalDeviceVertexAttributeDivisorFeaturesKHR = PhysicalDeviceVertexAttributeDivisorFeatures;
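// Illustrative usage sketch (hand-written, not generated from the registry): to enable
// a queried feature, chain the struct into vk::DeviceCreateInfo::pNext at device
// creation. Assumes "physicalDevice" is a hypothetical, valid vk::PhysicalDevice and
// "queueCreateInfo" a hypothetical, populated vk::DeviceQueueCreateInfo:
//
//   vk::PhysicalDeviceVertexAttributeDivisorFeatures divisorFeatures{};
//   divisorFeatures.vertexAttributeInstanceRateDivisor = VK_TRUE;
//   vk::DeviceCreateInfo createInfo{};
//   createInfo.setQueueCreateInfos( queueCreateInfo ).setPNext( &divisorFeatures );
//   vk::Device device = physicalDevice.createDevice( createInfo );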
// wrapper struct for struct VkPhysicalDeviceVertexAttributeDivisorProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVertexAttributeDivisorProperties.html
struct PhysicalDeviceVertexAttributeDivisorProperties
{
using NativeType = VkPhysicalDeviceVertexAttributeDivisorProperties;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVertexAttributeDivisorProperties;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorProperties(uint32_t maxVertexAttribDivisor_ = {}, VULKAN_HPP_NAMESPACE::Bool32 supportsNonZeroFirstInstance_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, maxVertexAttribDivisor{ maxVertexAttribDivisor_ }, supportsNonZeroFirstInstance{ supportsNonZeroFirstInstance_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorProperties( PhysicalDeviceVertexAttributeDivisorProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceVertexAttributeDivisorProperties( VkPhysicalDeviceVertexAttributeDivisorProperties const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceVertexAttributeDivisorProperties( *reinterpret_cast<PhysicalDeviceVertexAttributeDivisorProperties const *>( &rhs ) )
{}
PhysicalDeviceVertexAttributeDivisorProperties & operator=( PhysicalDeviceVertexAttributeDivisorProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceVertexAttributeDivisorProperties & operator=( VkPhysicalDeviceVertexAttributeDivisorProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorProperties const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceVertexAttributeDivisorProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceVertexAttributeDivisorProperties*>( this );
}
operator VkPhysicalDeviceVertexAttributeDivisorProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceVertexAttributeDivisorProperties*>( this );
}
operator VkPhysicalDeviceVertexAttributeDivisorProperties const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceVertexAttributeDivisorProperties*>( this );
}
operator VkPhysicalDeviceVertexAttributeDivisorProperties *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceVertexAttributeDivisorProperties*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, maxVertexAttribDivisor, supportsNonZeroFirstInstance );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceVertexAttributeDivisorProperties const & ) const = default;
#else
bool operator==( PhysicalDeviceVertexAttributeDivisorProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( maxVertexAttribDivisor == rhs.maxVertexAttribDivisor )
&& ( supportsNonZeroFirstInstance == rhs.supportsNonZeroFirstInstance );
#endif
}
bool operator!=( PhysicalDeviceVertexAttributeDivisorProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorProperties;
void * pNext = {};
uint32_t maxVertexAttribDivisor = {};
VULKAN_HPP_NAMESPACE::Bool32 supportsNonZeroFirstInstance = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceVertexAttributeDivisorProperties>
{
using Type = PhysicalDeviceVertexAttributeDivisorProperties;
};
using PhysicalDeviceVertexAttributeDivisorPropertiesKHR = PhysicalDeviceVertexAttributeDivisorProperties;
// wrapper struct for struct VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT.html
struct PhysicalDeviceVertexAttributeDivisorPropertiesEXT
{
using NativeType = VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorPropertiesEXT(uint32_t maxVertexAttribDivisor_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, maxVertexAttribDivisor{ maxVertexAttribDivisor_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorPropertiesEXT( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceVertexAttributeDivisorPropertiesEXT( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceVertexAttributeDivisorPropertiesEXT( *reinterpret_cast<PhysicalDeviceVertexAttributeDivisorPropertiesEXT const *>( &rhs ) )
{}
PhysicalDeviceVertexAttributeDivisorPropertiesEXT & operator=( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceVertexAttributeDivisorPropertiesEXT & operator=( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT*>( this );
}
operator VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT*>( this );
}
operator VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT*>( this );
}
operator VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, maxVertexAttribDivisor );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( maxVertexAttribDivisor == rhs.maxVertexAttribDivisor );
#endif
}
bool operator!=( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT;
void * pNext = {};
uint32_t maxVertexAttribDivisor = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT>
{
using Type = PhysicalDeviceVertexAttributeDivisorPropertiesEXT;
};
// wrapper struct for struct VkPhysicalDeviceVertexAttributeRobustnessFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVertexAttributeRobustnessFeaturesEXT.html
struct PhysicalDeviceVertexAttributeRobustnessFeaturesEXT
{
using NativeType = VkPhysicalDeviceVertexAttributeRobustnessFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVertexAttributeRobustnessFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeRobustnessFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeRobustness_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, vertexAttributeRobustness{ vertexAttributeRobustness_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeRobustnessFeaturesEXT( PhysicalDeviceVertexAttributeRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceVertexAttributeRobustnessFeaturesEXT( VkPhysicalDeviceVertexAttributeRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceVertexAttributeRobustnessFeaturesEXT( *reinterpret_cast<PhysicalDeviceVertexAttributeRobustnessFeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceVertexAttributeRobustnessFeaturesEXT & operator=( PhysicalDeviceVertexAttributeRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceVertexAttributeRobustnessFeaturesEXT & operator=( VkPhysicalDeviceVertexAttributeRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeRobustnessFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexAttributeRobustnessFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexAttributeRobustnessFeaturesEXT & setVertexAttributeRobustness( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeRobustness_ ) VULKAN_HPP_NOEXCEPT
{
vertexAttributeRobustness = vertexAttributeRobustness_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceVertexAttributeRobustnessFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceVertexAttributeRobustnessFeaturesEXT*>( this );
}
operator VkPhysicalDeviceVertexAttributeRobustnessFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceVertexAttributeRobustnessFeaturesEXT*>( this );
}
operator VkPhysicalDeviceVertexAttributeRobustnessFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceVertexAttributeRobustnessFeaturesEXT*>( this );
}
operator VkPhysicalDeviceVertexAttributeRobustnessFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceVertexAttributeRobustnessFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, vertexAttributeRobustness );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceVertexAttributeRobustnessFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceVertexAttributeRobustnessFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( vertexAttributeRobustness == rhs.vertexAttributeRobustness );
#endif
}
bool operator!=( PhysicalDeviceVertexAttributeRobustnessFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexAttributeRobustnessFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeRobustness = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceVertexAttributeRobustnessFeaturesEXT>
{
using Type = PhysicalDeviceVertexAttributeRobustnessFeaturesEXT;
};
// wrapper struct for struct VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT.html
struct PhysicalDeviceVertexInputDynamicStateFeaturesEXT
{
using NativeType = VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVertexInputDynamicStateFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexInputDynamicStateFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 vertexInputDynamicState_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, vertexInputDynamicState{ vertexInputDynamicState_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexInputDynamicStateFeaturesEXT( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceVertexInputDynamicStateFeaturesEXT( VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceVertexInputDynamicStateFeaturesEXT( *reinterpret_cast<PhysicalDeviceVertexInputDynamicStateFeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceVertexInputDynamicStateFeaturesEXT & operator=( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceVertexInputDynamicStateFeaturesEXT & operator=( VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexInputDynamicStateFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexInputDynamicStateFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexInputDynamicStateFeaturesEXT & setVertexInputDynamicState( VULKAN_HPP_NAMESPACE::Bool32 vertexInputDynamicState_ ) VULKAN_HPP_NOEXCEPT
{
vertexInputDynamicState = vertexInputDynamicState_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT*>( this );
}
operator VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT*>( this );
}
operator VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT*>( this );
}
operator VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, vertexInputDynamicState );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( vertexInputDynamicState == rhs.vertexInputDynamicState );
#endif
}
bool operator!=( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexInputDynamicStateFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 vertexInputDynamicState = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceVertexInputDynamicStateFeaturesEXT>
{
using Type = PhysicalDeviceVertexInputDynamicStateFeaturesEXT;
};
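// Hand-written note: because every generated setter returns *this, sType- and
// pNext-correct structs can be built fluently, e.g.:
//
//   auto vertexInputDynamicStateFeatures =
//     vk::PhysicalDeviceVertexInputDynamicStateFeaturesEXT{}.setVertexInputDynamicState( VK_TRUE );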
// wrapper struct for struct VkPhysicalDeviceVideoDecodeVP9FeaturesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVideoDecodeVP9FeaturesKHR.html
struct PhysicalDeviceVideoDecodeVP9FeaturesKHR
{
using NativeType = VkPhysicalDeviceVideoDecodeVP9FeaturesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVideoDecodeVp9FeaturesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoDecodeVP9FeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 videoDecodeVP9_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, videoDecodeVP9{ videoDecodeVP9_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoDecodeVP9FeaturesKHR( PhysicalDeviceVideoDecodeVP9FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceVideoDecodeVP9FeaturesKHR( VkPhysicalDeviceVideoDecodeVP9FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceVideoDecodeVP9FeaturesKHR( *reinterpret_cast<PhysicalDeviceVideoDecodeVP9FeaturesKHR const *>( &rhs ) )
{}
PhysicalDeviceVideoDecodeVP9FeaturesKHR & operator=( PhysicalDeviceVideoDecodeVP9FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceVideoDecodeVP9FeaturesKHR & operator=( VkPhysicalDeviceVideoDecodeVP9FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoDecodeVP9FeaturesKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoDecodeVP9FeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoDecodeVP9FeaturesKHR & setVideoDecodeVP9( VULKAN_HPP_NAMESPACE::Bool32 videoDecodeVP9_ ) VULKAN_HPP_NOEXCEPT
{
videoDecodeVP9 = videoDecodeVP9_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceVideoDecodeVP9FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceVideoDecodeVP9FeaturesKHR*>( this );
}
operator VkPhysicalDeviceVideoDecodeVP9FeaturesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceVideoDecodeVP9FeaturesKHR*>( this );
}
operator VkPhysicalDeviceVideoDecodeVP9FeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceVideoDecodeVP9FeaturesKHR*>( this );
}
operator VkPhysicalDeviceVideoDecodeVP9FeaturesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceVideoDecodeVP9FeaturesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, videoDecodeVP9 );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceVideoDecodeVP9FeaturesKHR const & ) const = default;
#else
bool operator==( PhysicalDeviceVideoDecodeVP9FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( videoDecodeVP9 == rhs.videoDecodeVP9 );
#endif
}
bool operator!=( PhysicalDeviceVideoDecodeVP9FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVideoDecodeVp9FeaturesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 videoDecodeVP9 = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceVideoDecodeVp9FeaturesKHR>
{
using Type = PhysicalDeviceVideoDecodeVP9FeaturesKHR;
};
// wrapper struct for struct VkPhysicalDeviceVideoEncodeAV1FeaturesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVideoEncodeAV1FeaturesKHR.html
struct PhysicalDeviceVideoEncodeAV1FeaturesKHR
{
using NativeType = VkPhysicalDeviceVideoEncodeAV1FeaturesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVideoEncodeAv1FeaturesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoEncodeAV1FeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 videoEncodeAV1_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, videoEncodeAV1{ videoEncodeAV1_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoEncodeAV1FeaturesKHR( PhysicalDeviceVideoEncodeAV1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceVideoEncodeAV1FeaturesKHR( VkPhysicalDeviceVideoEncodeAV1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceVideoEncodeAV1FeaturesKHR( *reinterpret_cast<PhysicalDeviceVideoEncodeAV1FeaturesKHR const *>( &rhs ) )
{}
PhysicalDeviceVideoEncodeAV1FeaturesKHR & operator=( PhysicalDeviceVideoEncodeAV1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceVideoEncodeAV1FeaturesKHR & operator=( VkPhysicalDeviceVideoEncodeAV1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeAV1FeaturesKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoEncodeAV1FeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoEncodeAV1FeaturesKHR & setVideoEncodeAV1( VULKAN_HPP_NAMESPACE::Bool32 videoEncodeAV1_ ) VULKAN_HPP_NOEXCEPT
{
videoEncodeAV1 = videoEncodeAV1_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceVideoEncodeAV1FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceVideoEncodeAV1FeaturesKHR*>( this );
}
operator VkPhysicalDeviceVideoEncodeAV1FeaturesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceVideoEncodeAV1FeaturesKHR*>( this );
}
operator VkPhysicalDeviceVideoEncodeAV1FeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceVideoEncodeAV1FeaturesKHR*>( this );
}
operator VkPhysicalDeviceVideoEncodeAV1FeaturesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceVideoEncodeAV1FeaturesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, videoEncodeAV1 );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceVideoEncodeAV1FeaturesKHR const & ) const = default;
#else
bool operator==( PhysicalDeviceVideoEncodeAV1FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( videoEncodeAV1 == rhs.videoEncodeAV1 );
#endif
}
bool operator!=( PhysicalDeviceVideoEncodeAV1FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVideoEncodeAv1FeaturesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 videoEncodeAV1 = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceVideoEncodeAv1FeaturesKHR>
{
using Type = PhysicalDeviceVideoEncodeAV1FeaturesKHR;
};
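// Illustrative usage sketch (hand-written, not generated from the registry): several
// codec feature structs can be queried through a single StructureChain, assuming a
// hypothetical, valid vk::PhysicalDevice "physicalDevice" whose implementation exposes
// the corresponding video extensions:
//
//   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
//                                            vk::PhysicalDeviceVideoDecodeVP9FeaturesKHR,
//                                            vk::PhysicalDeviceVideoEncodeAV1FeaturesKHR>();
//   bool vp9Decode = chain.get<vk::PhysicalDeviceVideoDecodeVP9FeaturesKHR>().videoDecodeVP9;
//   bool av1Encode = chain.get<vk::PhysicalDeviceVideoEncodeAV1FeaturesKHR>().videoEncodeAV1;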
// wrapper struct for struct VkVideoProfileInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoProfileInfoKHR.html
struct VideoProfileInfoKHR
{
using NativeType = VkVideoProfileInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoProfileInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoProfileInfoKHR(VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR videoCodecOperation_ = VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR::eNone, VULKAN_HPP_NAMESPACE::VideoChromaSubsamplingFlagsKHR chromaSubsampling_ = {}, VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR lumaBitDepth_ = {}, VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR chromaBitDepth_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, videoCodecOperation{ videoCodecOperation_ }, chromaSubsampling{ chromaSubsampling_ }, lumaBitDepth{ lumaBitDepth_ }, chromaBitDepth{ chromaBitDepth_ }
{}
VULKAN_HPP_CONSTEXPR VideoProfileInfoKHR( VideoProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoProfileInfoKHR( VkVideoProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoProfileInfoKHR( *reinterpret_cast<VideoProfileInfoKHR const *>( &rhs ) )
{}
VideoProfileInfoKHR & operator=( VideoProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoProfileInfoKHR & operator=( VkVideoProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoProfileInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoProfileInfoKHR & setVideoCodecOperation( VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR videoCodecOperation_ ) VULKAN_HPP_NOEXCEPT
{
videoCodecOperation = videoCodecOperation_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoProfileInfoKHR & setChromaSubsampling( VULKAN_HPP_NAMESPACE::VideoChromaSubsamplingFlagsKHR chromaSubsampling_ ) VULKAN_HPP_NOEXCEPT
{
chromaSubsampling = chromaSubsampling_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoProfileInfoKHR & setLumaBitDepth( VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR lumaBitDepth_ ) VULKAN_HPP_NOEXCEPT
{
lumaBitDepth = lumaBitDepth_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoProfileInfoKHR & setChromaBitDepth( VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR chromaBitDepth_ ) VULKAN_HPP_NOEXCEPT
{
chromaBitDepth = chromaBitDepth_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoProfileInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoProfileInfoKHR*>( this );
}
operator VkVideoProfileInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoProfileInfoKHR*>( this );
}
operator VkVideoProfileInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoProfileInfoKHR*>( this );
}
operator VkVideoProfileInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoProfileInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR const &, VULKAN_HPP_NAMESPACE::VideoChromaSubsamplingFlagsKHR const &, VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR const &, VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, videoCodecOperation, chromaSubsampling, lumaBitDepth, chromaBitDepth );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoProfileInfoKHR const & ) const = default;
#else
bool operator==( VideoProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( videoCodecOperation == rhs.videoCodecOperation )
&& ( chromaSubsampling == rhs.chromaSubsampling )
&& ( lumaBitDepth == rhs.lumaBitDepth )
&& ( chromaBitDepth == rhs.chromaBitDepth );
#endif
}
bool operator!=( VideoProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoProfileInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR videoCodecOperation = VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR::eNone;
VULKAN_HPP_NAMESPACE::VideoChromaSubsamplingFlagsKHR chromaSubsampling = {};
VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR lumaBitDepth = {};
VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR chromaBitDepth = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoProfileInfoKHR>
{
using Type = VideoProfileInfoKHR;
};
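  // Usage sketch (illustrative comment): because every setter returns *this, a profile
  // can be built fluently. The codec operation, subsampling, and bit depths below are
  // example values only (an H.264 decode profile with 4:2:0 subsampling and 8-bit
  // luma/chroma); codec-specific profile details, e.g. a VideoDecodeH264ProfileInfoKHR,
  // are typically chained in via setPNext:
  //
  //   auto profile = VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR{}
  //                    .setVideoCodecOperation( VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR::eDecodeH264 )
  //                    .setChromaSubsampling( VULKAN_HPP_NAMESPACE::VideoChromaSubsamplingFlagBitsKHR::e420 )
  //                    .setLumaBitDepth( VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagBitsKHR::e8 )
  //                    .setChromaBitDepth( VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagBitsKHR::e8 );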
// wrapper struct for struct VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR.html
struct PhysicalDeviceVideoEncodeQualityLevelInfoKHR
{
using NativeType = VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVideoEncodeQualityLevelInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoEncodeQualityLevelInfoKHR(const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pVideoProfile_ = {}, uint32_t qualityLevel_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pVideoProfile{ pVideoProfile_ }, qualityLevel{ qualityLevel_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoEncodeQualityLevelInfoKHR( PhysicalDeviceVideoEncodeQualityLevelInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceVideoEncodeQualityLevelInfoKHR( VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceVideoEncodeQualityLevelInfoKHR( *reinterpret_cast<PhysicalDeviceVideoEncodeQualityLevelInfoKHR const *>( &rhs ) )
{}
PhysicalDeviceVideoEncodeQualityLevelInfoKHR & operator=( PhysicalDeviceVideoEncodeQualityLevelInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceVideoEncodeQualityLevelInfoKHR & operator=( VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoEncodeQualityLevelInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoEncodeQualityLevelInfoKHR & setPVideoProfile( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pVideoProfile_ ) VULKAN_HPP_NOEXCEPT
{
pVideoProfile = pVideoProfile_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoEncodeQualityLevelInfoKHR & setQualityLevel( uint32_t qualityLevel_ ) VULKAN_HPP_NOEXCEPT
{
qualityLevel = qualityLevel_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR*>( this );
}
operator VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR*>( this );
}
operator VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR*>( this );
}
operator VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pVideoProfile, qualityLevel );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceVideoEncodeQualityLevelInfoKHR const & ) const = default;
#else
bool operator==( PhysicalDeviceVideoEncodeQualityLevelInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pVideoProfile == rhs.pVideoProfile )
&& ( qualityLevel == rhs.qualityLevel );
#endif
}
bool operator!=( PhysicalDeviceVideoEncodeQualityLevelInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVideoEncodeQualityLevelInfoKHR;
const void * pNext = {};
const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pVideoProfile = {};
uint32_t qualityLevel = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceVideoEncodeQualityLevelInfoKHR>
{
using Type = PhysicalDeviceVideoEncodeQualityLevelInfoKHR;
};
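  // Usage sketch (illustrative comment): this struct is consumed by
  // PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR. Assumes the enhanced
  // command wrappers of vulkan.hpp and a 'profile' (hypothetical name, e.g. built as
  // in the VideoProfileInfoKHR sketch above) describing an encode operation:
  //
  //   VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR qualityLevelInfo{ &profile, 0 };
  //   VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR qualityLevelProps =
  //     physicalDevice.getVideoEncodeQualityLevelPropertiesKHR( qualityLevelInfo );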
// wrapper struct for struct VkPhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR.html
struct PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR
{
using NativeType = VkPhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 videoEncodeQuantizationMap_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, videoEncodeQuantizationMap{ videoEncodeQuantizationMap_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR( PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR( VkPhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR( *reinterpret_cast<PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR const *>( &rhs ) )
{}
PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR & operator=( PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR & operator=( VkPhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR & setVideoEncodeQuantizationMap( VULKAN_HPP_NAMESPACE::Bool32 videoEncodeQuantizationMap_ ) VULKAN_HPP_NOEXCEPT
{
videoEncodeQuantizationMap = videoEncodeQuantizationMap_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR*>( this );
}
operator VkPhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR*>( this );
}
operator VkPhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR*>( this );
}
operator VkPhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, videoEncodeQuantizationMap );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR const & ) const = default;
#else
bool operator==( PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( videoEncodeQuantizationMap == rhs.videoEncodeQuantizationMap );
#endif
}
bool operator!=( PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 videoEncodeQuantizationMap = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR>
{
using Type = PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR;
};
// wrapper struct for struct VkPhysicalDeviceVideoFormatInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVideoFormatInfoKHR.html
struct PhysicalDeviceVideoFormatInfoKHR
{
using NativeType = VkPhysicalDeviceVideoFormatInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVideoFormatInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoFormatInfoKHR(VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, imageUsage{ imageUsage_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoFormatInfoKHR( PhysicalDeviceVideoFormatInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceVideoFormatInfoKHR( VkPhysicalDeviceVideoFormatInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceVideoFormatInfoKHR( *reinterpret_cast<PhysicalDeviceVideoFormatInfoKHR const *>( &rhs ) )
{}
PhysicalDeviceVideoFormatInfoKHR & operator=( PhysicalDeviceVideoFormatInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceVideoFormatInfoKHR & operator=( VkPhysicalDeviceVideoFormatInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoFormatInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoFormatInfoKHR & setImageUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ ) VULKAN_HPP_NOEXCEPT
{
imageUsage = imageUsage_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceVideoFormatInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR*>( this );
}
operator VkPhysicalDeviceVideoFormatInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceVideoFormatInfoKHR*>( this );
}
operator VkPhysicalDeviceVideoFormatInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR*>( this );
}
operator VkPhysicalDeviceVideoFormatInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceVideoFormatInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageUsageFlags const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, imageUsage );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceVideoFormatInfoKHR const & ) const = default;
#else
bool operator==( PhysicalDeviceVideoFormatInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( imageUsage == rhs.imageUsage );
#endif
}
bool operator!=( PhysicalDeviceVideoFormatInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVideoFormatInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceVideoFormatInfoKHR>
{
using Type = PhysicalDeviceVideoFormatInfoKHR;
};
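  // Usage sketch (illustrative comment): used as input to
  // PhysicalDevice::getVideoFormatPropertiesKHR to enumerate image formats usable for a
  // given video usage; per the spec, a VideoProfileListInfoKHR is expected in the pNext
  // chain. Assumes the enhanced command wrappers returning a std::vector, plus the
  // 'profile' name from the earlier sketch:
  //
  //   VULKAN_HPP_NAMESPACE::VideoProfileListInfoKHR profileList{ 1, &profile };
  //   VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR formatInfo{
  //     VULKAN_HPP_NAMESPACE::ImageUsageFlagBits::eVideoDecodeDpbKHR, &profileList };
  //   std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR> formatProps =
  //     physicalDevice.getVideoFormatPropertiesKHR( formatInfo );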
// wrapper struct for struct VkPhysicalDeviceVideoMaintenance1FeaturesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVideoMaintenance1FeaturesKHR.html
struct PhysicalDeviceVideoMaintenance1FeaturesKHR
{
using NativeType = VkPhysicalDeviceVideoMaintenance1FeaturesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVideoMaintenance1FeaturesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoMaintenance1FeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 videoMaintenance1_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, videoMaintenance1{ videoMaintenance1_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoMaintenance1FeaturesKHR( PhysicalDeviceVideoMaintenance1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceVideoMaintenance1FeaturesKHR( VkPhysicalDeviceVideoMaintenance1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceVideoMaintenance1FeaturesKHR( *reinterpret_cast<PhysicalDeviceVideoMaintenance1FeaturesKHR const *>( &rhs ) )
{}
PhysicalDeviceVideoMaintenance1FeaturesKHR & operator=( PhysicalDeviceVideoMaintenance1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceVideoMaintenance1FeaturesKHR & operator=( VkPhysicalDeviceVideoMaintenance1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoMaintenance1FeaturesKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoMaintenance1FeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoMaintenance1FeaturesKHR & setVideoMaintenance1( VULKAN_HPP_NAMESPACE::Bool32 videoMaintenance1_ ) VULKAN_HPP_NOEXCEPT
{
videoMaintenance1 = videoMaintenance1_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceVideoMaintenance1FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceVideoMaintenance1FeaturesKHR*>( this );
}
operator VkPhysicalDeviceVideoMaintenance1FeaturesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceVideoMaintenance1FeaturesKHR*>( this );
}
operator VkPhysicalDeviceVideoMaintenance1FeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceVideoMaintenance1FeaturesKHR*>( this );
}
operator VkPhysicalDeviceVideoMaintenance1FeaturesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceVideoMaintenance1FeaturesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, videoMaintenance1 );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceVideoMaintenance1FeaturesKHR const & ) const = default;
#else
bool operator==( PhysicalDeviceVideoMaintenance1FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( videoMaintenance1 == rhs.videoMaintenance1 );
#endif
}
bool operator!=( PhysicalDeviceVideoMaintenance1FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVideoMaintenance1FeaturesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 videoMaintenance1 = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceVideoMaintenance1FeaturesKHR>
{
using Type = PhysicalDeviceVideoMaintenance1FeaturesKHR;
};
// wrapper struct for struct VkPhysicalDeviceVideoMaintenance2FeaturesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVideoMaintenance2FeaturesKHR.html
struct PhysicalDeviceVideoMaintenance2FeaturesKHR
{
using NativeType = VkPhysicalDeviceVideoMaintenance2FeaturesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVideoMaintenance2FeaturesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoMaintenance2FeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 videoMaintenance2_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, videoMaintenance2{ videoMaintenance2_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoMaintenance2FeaturesKHR( PhysicalDeviceVideoMaintenance2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceVideoMaintenance2FeaturesKHR( VkPhysicalDeviceVideoMaintenance2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceVideoMaintenance2FeaturesKHR( *reinterpret_cast<PhysicalDeviceVideoMaintenance2FeaturesKHR const *>( &rhs ) )
{}
PhysicalDeviceVideoMaintenance2FeaturesKHR & operator=( PhysicalDeviceVideoMaintenance2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceVideoMaintenance2FeaturesKHR & operator=( VkPhysicalDeviceVideoMaintenance2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoMaintenance2FeaturesKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoMaintenance2FeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoMaintenance2FeaturesKHR & setVideoMaintenance2( VULKAN_HPP_NAMESPACE::Bool32 videoMaintenance2_ ) VULKAN_HPP_NOEXCEPT
{
videoMaintenance2 = videoMaintenance2_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceVideoMaintenance2FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceVideoMaintenance2FeaturesKHR*>( this );
}
operator VkPhysicalDeviceVideoMaintenance2FeaturesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceVideoMaintenance2FeaturesKHR*>( this );
}
operator VkPhysicalDeviceVideoMaintenance2FeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceVideoMaintenance2FeaturesKHR*>( this );
}
operator VkPhysicalDeviceVideoMaintenance2FeaturesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceVideoMaintenance2FeaturesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, videoMaintenance2 );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceVideoMaintenance2FeaturesKHR const & ) const = default;
#else
bool operator==( PhysicalDeviceVideoMaintenance2FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( videoMaintenance2 == rhs.videoMaintenance2 );
#endif
}
bool operator!=( PhysicalDeviceVideoMaintenance2FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVideoMaintenance2FeaturesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 videoMaintenance2 = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceVideoMaintenance2FeaturesKHR>
{
using Type = PhysicalDeviceVideoMaintenance2FeaturesKHR;
};
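  // Usage sketch (illustrative comment): several feature structs can be queried in a
  // single call with a StructureChain, assuming the enhanced API of vulkan.hpp; the
  // template parameters and get<>() accessor below follow the StructureChain convention:
  //
  //   auto chain = physicalDevice.getFeatures2<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2,
  //                                            VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoMaintenance1FeaturesKHR,
  //                                            VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoMaintenance2FeaturesKHR>();
  //   bool maintenance1 =
  //     chain.get<VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoMaintenance1FeaturesKHR>().videoMaintenance1 == VK_TRUE;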
// wrapper struct for struct VkPhysicalDeviceVulkan11Features, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVulkan11Features.html
struct PhysicalDeviceVulkan11Features
{
using NativeType = VkPhysicalDeviceVulkan11Features;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan11Features;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan11Features(VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiview_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ = {}, VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ = {}, VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ = {}, VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, storageBuffer16BitAccess{ storageBuffer16BitAccess_ }, uniformAndStorageBuffer16BitAccess{ uniformAndStorageBuffer16BitAccess_ }, storagePushConstant16{ storagePushConstant16_ }, storageInputOutput16{ storageInputOutput16_ }, multiview{ multiview_ }, multiviewGeometryShader{ multiviewGeometryShader_ }, multiviewTessellationShader{ multiviewTessellationShader_ }, variablePointersStorageBuffer{ variablePointersStorageBuffer_ }, variablePointers{ variablePointers_ }, protectedMemory{ protectedMemory_ }, samplerYcbcrConversion{ samplerYcbcrConversion_ }, shaderDrawParameters{ shaderDrawParameters_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan11Features( PhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceVulkan11Features( VkPhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceVulkan11Features( *reinterpret_cast<PhysicalDeviceVulkan11Features const *>( &rhs ) )
{}
PhysicalDeviceVulkan11Features & operator=( PhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceVulkan11Features & operator=( VkPhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Features const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT
{
storageBuffer16BitAccess = storageBuffer16BitAccess_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setUniformAndStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT
{
uniformAndStorageBuffer16BitAccess = uniformAndStorageBuffer16BitAccess_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setStoragePushConstant16( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ ) VULKAN_HPP_NOEXCEPT
{
storagePushConstant16 = storagePushConstant16_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setStorageInputOutput16( VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ ) VULKAN_HPP_NOEXCEPT
{
storageInputOutput16 = storageInputOutput16_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setMultiview( VULKAN_HPP_NAMESPACE::Bool32 multiview_ ) VULKAN_HPP_NOEXCEPT
{
multiview = multiview_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setMultiviewGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ ) VULKAN_HPP_NOEXCEPT
{
multiviewGeometryShader = multiviewGeometryShader_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setMultiviewTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ ) VULKAN_HPP_NOEXCEPT
{
multiviewTessellationShader = multiviewTessellationShader_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setVariablePointersStorageBuffer( VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ ) VULKAN_HPP_NOEXCEPT
{
variablePointersStorageBuffer = variablePointersStorageBuffer_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setVariablePointers( VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ ) VULKAN_HPP_NOEXCEPT
{
variablePointers = variablePointers_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setProtectedMemory( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ ) VULKAN_HPP_NOEXCEPT
{
protectedMemory = protectedMemory_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setSamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ ) VULKAN_HPP_NOEXCEPT
{
samplerYcbcrConversion = samplerYcbcrConversion_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setShaderDrawParameters( VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ ) VULKAN_HPP_NOEXCEPT
{
shaderDrawParameters = shaderDrawParameters_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceVulkan11Features const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceVulkan11Features*>( this );
}
operator VkPhysicalDeviceVulkan11Features &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceVulkan11Features*>( this );
}
operator VkPhysicalDeviceVulkan11Features const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceVulkan11Features*>( this );
}
operator VkPhysicalDeviceVulkan11Features *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceVulkan11Features*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, storageBuffer16BitAccess, uniformAndStorageBuffer16BitAccess, storagePushConstant16, storageInputOutput16, multiview, multiviewGeometryShader, multiviewTessellationShader, variablePointersStorageBuffer, variablePointers, protectedMemory, samplerYcbcrConversion, shaderDrawParameters );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceVulkan11Features const & ) const = default;
#else
bool operator==( PhysicalDeviceVulkan11Features const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( storageBuffer16BitAccess == rhs.storageBuffer16BitAccess )
&& ( uniformAndStorageBuffer16BitAccess == rhs.uniformAndStorageBuffer16BitAccess )
&& ( storagePushConstant16 == rhs.storagePushConstant16 )
&& ( storageInputOutput16 == rhs.storageInputOutput16 )
&& ( multiview == rhs.multiview )
&& ( multiviewGeometryShader == rhs.multiviewGeometryShader )
&& ( multiviewTessellationShader == rhs.multiviewTessellationShader )
&& ( variablePointersStorageBuffer == rhs.variablePointersStorageBuffer )
&& ( variablePointers == rhs.variablePointers )
&& ( protectedMemory == rhs.protectedMemory )
&& ( samplerYcbcrConversion == rhs.samplerYcbcrConversion )
&& ( shaderDrawParameters == rhs.shaderDrawParameters );
#endif
}
bool operator!=( PhysicalDeviceVulkan11Features const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan11Features;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess = {};
VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess = {};
VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16 = {};
VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16 = {};
VULKAN_HPP_NAMESPACE::Bool32 multiview = {};
VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader = {};
VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader = {};
VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer = {};
VULKAN_HPP_NAMESPACE::Bool32 variablePointers = {};
VULKAN_HPP_NAMESPACE::Bool32 protectedMemory = {};
VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan11Features>
{
using Type = PhysicalDeviceVulkan11Features;
};
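  // Usage sketch (illustrative comment): Vulkan 1.1 core features are enabled at device
  // creation by chaining this struct into DeviceCreateInfo. The two features enabled
  // below are arbitrary examples; the queue create infos and the rest of the
  // DeviceCreateInfo are assumed to be filled in elsewhere:
  //
  //   VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Features vulkan11Features;
  //   vulkan11Features.setMultiview( VK_TRUE ).setSamplerYcbcrConversion( VK_TRUE );
  //   VULKAN_HPP_NAMESPACE::DeviceCreateInfo deviceCreateInfo;
  //   deviceCreateInfo.setPNext( &vulkan11Features );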
// wrapper struct for struct VkPhysicalDeviceVulkan11Properties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVulkan11Properties.html
struct PhysicalDeviceVulkan11Properties
{
using NativeType = VkPhysicalDeviceVulkan11Properties;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan11Properties;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Properties(std::array<uint8_t,VK_UUID_SIZE> const & deviceUUID_ = {}, std::array<uint8_t,VK_UUID_SIZE> const & driverUUID_ = {}, std::array<uint8_t,VK_LUID_SIZE> const & deviceLUID_ = {}, uint32_t deviceNodeMask_ = {}, VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid_ = {}, uint32_t subgroupSize_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlags subgroupSupportedStages_ = {}, VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags subgroupSupportedOperations_ = {}, VULKAN_HPP_NAMESPACE::Bool32 subgroupQuadOperationsInAllStages_ = {}, VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior_ = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes, uint32_t maxMultiviewViewCount_ = {}, uint32_t maxMultiviewInstanceIndex_ = {}, VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault_ = {}, uint32_t maxPerSetDescriptors_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, deviceUUID{ deviceUUID_ }, driverUUID{ driverUUID_ }, deviceLUID{ deviceLUID_ }, deviceNodeMask{ deviceNodeMask_ }, deviceLUIDValid{ deviceLUIDValid_ }, subgroupSize{ subgroupSize_ }, subgroupSupportedStages{ subgroupSupportedStages_ }, subgroupSupportedOperations{ subgroupSupportedOperations_ }, subgroupQuadOperationsInAllStages{ subgroupQuadOperationsInAllStages_ }, pointClippingBehavior{ pointClippingBehavior_ }, maxMultiviewViewCount{ maxMultiviewViewCount_ }, maxMultiviewInstanceIndex{ maxMultiviewInstanceIndex_ }, protectedNoFault{ protectedNoFault_ }, maxPerSetDescriptors{ maxPerSetDescriptors_ }, maxMemoryAllocationSize{ maxMemoryAllocationSize_ }
{}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Properties( PhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceVulkan11Properties( VkPhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceVulkan11Properties( *reinterpret_cast<PhysicalDeviceVulkan11Properties const *>( &rhs ) )
{}
PhysicalDeviceVulkan11Properties & operator=( PhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceVulkan11Properties & operator=( VkPhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Properties const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceVulkan11Properties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceVulkan11Properties*>( this );
}
operator VkPhysicalDeviceVulkan11Properties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceVulkan11Properties*>( this );
}
operator VkPhysicalDeviceVulkan11Properties const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceVulkan11Properties*>( this );
}
operator VkPhysicalDeviceVulkan11Properties *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceVulkan11Properties*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_LUID_SIZE> const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ShaderStageFlags const &, VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::PointClippingBehavior const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, deviceUUID, driverUUID, deviceLUID, deviceNodeMask, deviceLUIDValid, subgroupSize, subgroupSupportedStages, subgroupSupportedOperations, subgroupQuadOperationsInAllStages, pointClippingBehavior, maxMultiviewViewCount, maxMultiviewInstanceIndex, protectedNoFault, maxPerSetDescriptors, maxMemoryAllocationSize );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceVulkan11Properties const & ) const = default;
#else
bool operator==( PhysicalDeviceVulkan11Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( deviceUUID == rhs.deviceUUID )
&& ( driverUUID == rhs.driverUUID )
&& ( deviceLUID == rhs.deviceLUID )
&& ( deviceNodeMask == rhs.deviceNodeMask )
&& ( deviceLUIDValid == rhs.deviceLUIDValid )
&& ( subgroupSize == rhs.subgroupSize )
&& ( subgroupSupportedStages == rhs.subgroupSupportedStages )
&& ( subgroupSupportedOperations == rhs.subgroupSupportedOperations )
&& ( subgroupQuadOperationsInAllStages == rhs.subgroupQuadOperationsInAllStages )
&& ( pointClippingBehavior == rhs.pointClippingBehavior )
&& ( maxMultiviewViewCount == rhs.maxMultiviewViewCount )
&& ( maxMultiviewInstanceIndex == rhs.maxMultiviewInstanceIndex )
&& ( protectedNoFault == rhs.protectedNoFault )
&& ( maxPerSetDescriptors == rhs.maxPerSetDescriptors )
&& ( maxMemoryAllocationSize == rhs.maxMemoryAllocationSize );
#endif
}
bool operator!=( PhysicalDeviceVulkan11Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan11Properties;
void * pNext = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> deviceUUID = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> driverUUID = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_LUID_SIZE> deviceLUID = {};
uint32_t deviceNodeMask = {};
VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid = {};
uint32_t subgroupSize = {};
VULKAN_HPP_NAMESPACE::ShaderStageFlags subgroupSupportedStages = {};
VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags subgroupSupportedOperations = {};
VULKAN_HPP_NAMESPACE::Bool32 subgroupQuadOperationsInAllStages = {};
VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes;
uint32_t maxMultiviewViewCount = {};
uint32_t maxMultiviewInstanceIndex = {};
VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault = {};
uint32_t maxPerSetDescriptors = {};
VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan11Properties>
{
using Type = PhysicalDeviceVulkan11Properties;
};
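  // Usage sketch (illustrative comment): this is a returned-only struct filled in by the
  // implementation, which is why no setters are generated for it; query it by chaining
  // into PhysicalDeviceProperties2:
  //
  //   VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Properties vulkan11Props;
  //   VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 props2;
  //   props2.pNext = &vulkan11Props;
  //   physicalDevice.getProperties2( &props2 );
  //   uint32_t subgroupSize = vulkan11Props.subgroupSize;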
// wrapper struct for struct VkPhysicalDeviceVulkan12Features, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVulkan12Features.html
struct PhysicalDeviceVulkan12Features
{
using NativeType = VkPhysicalDeviceVulkan12Features;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan12Features;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan12Features(VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge_ = {}, VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ = {}, VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ = {}, VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax_ = {}, VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ = {}, VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ = {}, VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ = {}, VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ = {}, VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ = {}, VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer_ = {}, VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, samplerMirrorClampToEdge{ samplerMirrorClampToEdge_ }, drawIndirectCount{ drawIndirectCount_ }, storageBuffer8BitAccess{ storageBuffer8BitAccess_ }, uniformAndStorageBuffer8BitAccess{ uniformAndStorageBuffer8BitAccess_ }, storagePushConstant8{ storagePushConstant8_ }, shaderBufferInt64Atomics{ shaderBufferInt64Atomics_ }, shaderSharedInt64Atomics{ shaderSharedInt64Atomics_ }, shaderFloat16{ shaderFloat16_ }, shaderInt8{ shaderInt8_ }, descriptorIndexing{ descriptorIndexing_ }, shaderInputAttachmentArrayDynamicIndexing{ shaderInputAttachmentArrayDynamicIndexing_ }, shaderUniformTexelBufferArrayDynamicIndexing{ shaderUniformTexelBufferArrayDynamicIndexing_ }, shaderStorageTexelBufferArrayDynamicIndexing{ shaderStorageTexelBufferArrayDynamicIndexing_ }, shaderUniformBufferArrayNonUniformIndexing{ shaderUniformBufferArrayNonUniformIndexing_ }, shaderSampledImageArrayNonUniformIndexing{ shaderSampledImageArrayNonUniformIndexing_ }, shaderStorageBufferArrayNonUniformIndexing{ shaderStorageBufferArrayNonUniformIndexing_ }, shaderStorageImageArrayNonUniformIndexing{ shaderStorageImageArrayNonUniformIndexing_ }, shaderInputAttachmentArrayNonUniformIndexing{ shaderInputAttachmentArrayNonUniformIndexing_ }, shaderUniformTexelBufferArrayNonUniformIndexing{ shaderUniformTexelBufferArrayNonUniformIndexing_ }, shaderStorageTexelBufferArrayNonUniformIndexing{ shaderStorageTexelBufferArrayNonUniformIndexing_ }, descriptorBindingUniformBufferUpdateAfterBind{ descriptorBindingUniformBufferUpdateAfterBind_ }, descriptorBindingSampledImageUpdateAfterBind{ descriptorBindingSampledImageUpdateAfterBind_ }, descriptorBindingStorageImageUpdateAfterBind{ descriptorBindingStorageImageUpdateAfterBind_ }, descriptorBindingStorageBufferUpdateAfterBind{ descriptorBindingStorageBufferUpdateAfterBind_ }, descriptorBindingUniformTexelBufferUpdateAfterBind{ descriptorBindingUniformTexelBufferUpdateAfterBind_ }, descriptorBindingStorageTexelBufferUpdateAfterBind{ descriptorBindingStorageTexelBufferUpdateAfterBind_ }, descriptorBindingUpdateUnusedWhilePending{ descriptorBindingUpdateUnusedWhilePending_ }, descriptorBindingPartiallyBound{ descriptorBindingPartiallyBound_ }, descriptorBindingVariableDescriptorCount{ descriptorBindingVariableDescriptorCount_ }, runtimeDescriptorArray{ runtimeDescriptorArray_ }, samplerFilterMinmax{ samplerFilterMinmax_ }, scalarBlockLayout{ scalarBlockLayout_ }, imagelessFramebuffer{ imagelessFramebuffer_ }, uniformBufferStandardLayout{ uniformBufferStandardLayout_ }, shaderSubgroupExtendedTypes{ shaderSubgroupExtendedTypes_ }, separateDepthStencilLayouts{ separateDepthStencilLayouts_ }, hostQueryReset{ hostQueryReset_ }, timelineSemaphore{ timelineSemaphore_ }, bufferDeviceAddress{ bufferDeviceAddress_ }, bufferDeviceAddressCaptureReplay{ bufferDeviceAddressCaptureReplay_ }, bufferDeviceAddressMultiDevice{ bufferDeviceAddressMultiDevice_ }, vulkanMemoryModel{ vulkanMemoryModel_ }, vulkanMemoryModelDeviceScope{ vulkanMemoryModelDeviceScope_ }, vulkanMemoryModelAvailabilityVisibilityChains{ vulkanMemoryModelAvailabilityVisibilityChains_ }, shaderOutputViewportIndex{ shaderOutputViewportIndex_ }, shaderOutputLayer{ shaderOutputLayer_ }, subgroupBroadcastDynamicId{ subgroupBroadcastDynamicId_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan12Features( PhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceVulkan12Features( VkPhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceVulkan12Features( *reinterpret_cast<PhysicalDeviceVulkan12Features const *>( &rhs ) )
{}
PhysicalDeviceVulkan12Features & operator=( PhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceVulkan12Features & operator=( VkPhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Features const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setSamplerMirrorClampToEdge( VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge_ ) VULKAN_HPP_NOEXCEPT
{
samplerMirrorClampToEdge = samplerMirrorClampToEdge_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDrawIndirectCount( VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount_ ) VULKAN_HPP_NOEXCEPT
{
drawIndirectCount = drawIndirectCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT
{
storageBuffer8BitAccess = storageBuffer8BitAccess_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setUniformAndStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT
{
uniformAndStorageBuffer8BitAccess = uniformAndStorageBuffer8BitAccess_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setStoragePushConstant8( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ ) VULKAN_HPP_NOEXCEPT
{
storagePushConstant8 = storagePushConstant8_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderBufferInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
{
shaderBufferInt64Atomics = shaderBufferInt64Atomics_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderSharedInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
{
shaderSharedInt64Atomics = shaderSharedInt64Atomics_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderFloat16( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ ) VULKAN_HPP_NOEXCEPT
{
shaderFloat16 = shaderFloat16_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderInt8( VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ ) VULKAN_HPP_NOEXCEPT
{
shaderInt8 = shaderInt8_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorIndexing( VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing_ ) VULKAN_HPP_NOEXCEPT
{
descriptorIndexing = descriptorIndexing_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderInputAttachmentArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderInputAttachmentArrayDynamicIndexing = shaderInputAttachmentArrayDynamicIndexing_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderUniformTexelBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderUniformTexelBufferArrayDynamicIndexing = shaderUniformTexelBufferArrayDynamicIndexing_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderStorageTexelBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderStorageTexelBufferArrayDynamicIndexing = shaderStorageTexelBufferArrayDynamicIndexing_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderUniformBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderUniformBufferArrayNonUniformIndexing = shaderUniformBufferArrayNonUniformIndexing_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderSampledImageArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderSampledImageArrayNonUniformIndexing = shaderSampledImageArrayNonUniformIndexing_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderStorageBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderStorageBufferArrayNonUniformIndexing = shaderStorageBufferArrayNonUniformIndexing_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderStorageImageArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderStorageImageArrayNonUniformIndexing = shaderStorageImageArrayNonUniformIndexing_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderInputAttachmentArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderInputAttachmentArrayNonUniformIndexing = shaderInputAttachmentArrayNonUniformIndexing_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderUniformTexelBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderUniformTexelBufferArrayNonUniformIndexing = shaderUniformTexelBufferArrayNonUniformIndexing_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderStorageTexelBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderStorageTexelBufferArrayNonUniformIndexing = shaderStorageTexelBufferArrayNonUniformIndexing_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorBindingUniformBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBindingUniformBufferUpdateAfterBind = descriptorBindingUniformBufferUpdateAfterBind_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorBindingSampledImageUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBindingSampledImageUpdateAfterBind = descriptorBindingSampledImageUpdateAfterBind_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorBindingStorageImageUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBindingStorageImageUpdateAfterBind = descriptorBindingStorageImageUpdateAfterBind_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorBindingStorageBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBindingStorageBufferUpdateAfterBind = descriptorBindingStorageBufferUpdateAfterBind_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorBindingUniformTexelBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBindingUniformTexelBufferUpdateAfterBind = descriptorBindingUniformTexelBufferUpdateAfterBind_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorBindingStorageTexelBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBindingStorageTexelBufferUpdateAfterBind = descriptorBindingStorageTexelBufferUpdateAfterBind_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorBindingUpdateUnusedWhilePending( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBindingUpdateUnusedWhilePending = descriptorBindingUpdateUnusedWhilePending_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorBindingPartiallyBound( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBindingPartiallyBound = descriptorBindingPartiallyBound_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorBindingVariableDescriptorCount( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBindingVariableDescriptorCount = descriptorBindingVariableDescriptorCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setRuntimeDescriptorArray( VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ ) VULKAN_HPP_NOEXCEPT
{
runtimeDescriptorArray = runtimeDescriptorArray_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setSamplerFilterMinmax( VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax_ ) VULKAN_HPP_NOEXCEPT
{
samplerFilterMinmax = samplerFilterMinmax_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setScalarBlockLayout( VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ ) VULKAN_HPP_NOEXCEPT
{
scalarBlockLayout = scalarBlockLayout_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setImagelessFramebuffer( VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ ) VULKAN_HPP_NOEXCEPT
{
imagelessFramebuffer = imagelessFramebuffer_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setUniformBufferStandardLayout( VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ ) VULKAN_HPP_NOEXCEPT
{
uniformBufferStandardLayout = uniformBufferStandardLayout_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderSubgroupExtendedTypes( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ ) VULKAN_HPP_NOEXCEPT
{
shaderSubgroupExtendedTypes = shaderSubgroupExtendedTypes_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setSeparateDepthStencilLayouts( VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ ) VULKAN_HPP_NOEXCEPT
{
separateDepthStencilLayouts = separateDepthStencilLayouts_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setHostQueryReset( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ ) VULKAN_HPP_NOEXCEPT
{
hostQueryReset = hostQueryReset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setTimelineSemaphore( VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ ) VULKAN_HPP_NOEXCEPT
{
timelineSemaphore = timelineSemaphore_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setBufferDeviceAddress( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT
{
bufferDeviceAddress = bufferDeviceAddress_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setBufferDeviceAddressCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
{
bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setBufferDeviceAddressMultiDevice( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT
{
bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setVulkanMemoryModel( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ ) VULKAN_HPP_NOEXCEPT
{
vulkanMemoryModel = vulkanMemoryModel_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setVulkanMemoryModelDeviceScope( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ ) VULKAN_HPP_NOEXCEPT
{
vulkanMemoryModelDeviceScope = vulkanMemoryModelDeviceScope_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setVulkanMemoryModelAvailabilityVisibilityChains( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ ) VULKAN_HPP_NOEXCEPT
{
vulkanMemoryModelAvailabilityVisibilityChains = vulkanMemoryModelAvailabilityVisibilityChains_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderOutputViewportIndex( VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex_ ) VULKAN_HPP_NOEXCEPT
{
shaderOutputViewportIndex = shaderOutputViewportIndex_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderOutputLayer( VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer_ ) VULKAN_HPP_NOEXCEPT
{
shaderOutputLayer = shaderOutputLayer_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setSubgroupBroadcastDynamicId( VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId_ ) VULKAN_HPP_NOEXCEPT
{
subgroupBroadcastDynamicId = subgroupBroadcastDynamicId_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceVulkan12Features const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceVulkan12Features*>( this );
}
operator VkPhysicalDeviceVulkan12Features &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceVulkan12Features*>( this );
}
operator VkPhysicalDeviceVulkan12Features const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceVulkan12Features*>( this );
}
operator VkPhysicalDeviceVulkan12Features *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceVulkan12Features*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, samplerMirrorClampToEdge, drawIndirectCount, storageBuffer8BitAccess, uniformAndStorageBuffer8BitAccess, storagePushConstant8, shaderBufferInt64Atomics, shaderSharedInt64Atomics, shaderFloat16, shaderInt8, descriptorIndexing, shaderInputAttachmentArrayDynamicIndexing, shaderUniformTexelBufferArrayDynamicIndexing, shaderStorageTexelBufferArrayDynamicIndexing, shaderUniformBufferArrayNonUniformIndexing, shaderSampledImageArrayNonUniformIndexing, shaderStorageBufferArrayNonUniformIndexing, shaderStorageImageArrayNonUniformIndexing, shaderInputAttachmentArrayNonUniformIndexing, shaderUniformTexelBufferArrayNonUniformIndexing, shaderStorageTexelBufferArrayNonUniformIndexing, descriptorBindingUniformBufferUpdateAfterBind, descriptorBindingSampledImageUpdateAfterBind, descriptorBindingStorageImageUpdateAfterBind, descriptorBindingStorageBufferUpdateAfterBind, descriptorBindingUniformTexelBufferUpdateAfterBind, descriptorBindingStorageTexelBufferUpdateAfterBind, descriptorBindingUpdateUnusedWhilePending, descriptorBindingPartiallyBound, descriptorBindingVariableDescriptorCount, runtimeDescriptorArray, samplerFilterMinmax, scalarBlockLayout, imagelessFramebuffer, uniformBufferStandardLayout, shaderSubgroupExtendedTypes, separateDepthStencilLayouts, hostQueryReset, timelineSemaphore, bufferDeviceAddress, bufferDeviceAddressCaptureReplay, bufferDeviceAddressMultiDevice, vulkanMemoryModel, vulkanMemoryModelDeviceScope, vulkanMemoryModelAvailabilityVisibilityChains, shaderOutputViewportIndex, shaderOutputLayer, subgroupBroadcastDynamicId );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceVulkan12Features const & ) const = default;
#else
bool operator==( PhysicalDeviceVulkan12Features const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( samplerMirrorClampToEdge == rhs.samplerMirrorClampToEdge )
&& ( drawIndirectCount == rhs.drawIndirectCount )
&& ( storageBuffer8BitAccess == rhs.storageBuffer8BitAccess )
&& ( uniformAndStorageBuffer8BitAccess == rhs.uniformAndStorageBuffer8BitAccess )
&& ( storagePushConstant8 == rhs.storagePushConstant8 )
&& ( shaderBufferInt64Atomics == rhs.shaderBufferInt64Atomics )
&& ( shaderSharedInt64Atomics == rhs.shaderSharedInt64Atomics )
&& ( shaderFloat16 == rhs.shaderFloat16 )
&& ( shaderInt8 == rhs.shaderInt8 )
&& ( descriptorIndexing == rhs.descriptorIndexing )
&& ( shaderInputAttachmentArrayDynamicIndexing == rhs.shaderInputAttachmentArrayDynamicIndexing )
&& ( shaderUniformTexelBufferArrayDynamicIndexing == rhs.shaderUniformTexelBufferArrayDynamicIndexing )
&& ( shaderStorageTexelBufferArrayDynamicIndexing == rhs.shaderStorageTexelBufferArrayDynamicIndexing )
&& ( shaderUniformBufferArrayNonUniformIndexing == rhs.shaderUniformBufferArrayNonUniformIndexing )
&& ( shaderSampledImageArrayNonUniformIndexing == rhs.shaderSampledImageArrayNonUniformIndexing )
&& ( shaderStorageBufferArrayNonUniformIndexing == rhs.shaderStorageBufferArrayNonUniformIndexing )
&& ( shaderStorageImageArrayNonUniformIndexing == rhs.shaderStorageImageArrayNonUniformIndexing )
&& ( shaderInputAttachmentArrayNonUniformIndexing == rhs.shaderInputAttachmentArrayNonUniformIndexing )
&& ( shaderUniformTexelBufferArrayNonUniformIndexing == rhs.shaderUniformTexelBufferArrayNonUniformIndexing )
&& ( shaderStorageTexelBufferArrayNonUniformIndexing == rhs.shaderStorageTexelBufferArrayNonUniformIndexing )
&& ( descriptorBindingUniformBufferUpdateAfterBind == rhs.descriptorBindingUniformBufferUpdateAfterBind )
&& ( descriptorBindingSampledImageUpdateAfterBind == rhs.descriptorBindingSampledImageUpdateAfterBind )
&& ( descriptorBindingStorageImageUpdateAfterBind == rhs.descriptorBindingStorageImageUpdateAfterBind )
&& ( descriptorBindingStorageBufferUpdateAfterBind == rhs.descriptorBindingStorageBufferUpdateAfterBind )
&& ( descriptorBindingUniformTexelBufferUpdateAfterBind == rhs.descriptorBindingUniformTexelBufferUpdateAfterBind )
&& ( descriptorBindingStorageTexelBufferUpdateAfterBind == rhs.descriptorBindingStorageTexelBufferUpdateAfterBind )
&& ( descriptorBindingUpdateUnusedWhilePending == rhs.descriptorBindingUpdateUnusedWhilePending )
&& ( descriptorBindingPartiallyBound == rhs.descriptorBindingPartiallyBound )
&& ( descriptorBindingVariableDescriptorCount == rhs.descriptorBindingVariableDescriptorCount )
&& ( runtimeDescriptorArray == rhs.runtimeDescriptorArray )
&& ( samplerFilterMinmax == rhs.samplerFilterMinmax )
&& ( scalarBlockLayout == rhs.scalarBlockLayout )
&& ( imagelessFramebuffer == rhs.imagelessFramebuffer )
&& ( uniformBufferStandardLayout == rhs.uniformBufferStandardLayout )
&& ( shaderSubgroupExtendedTypes == rhs.shaderSubgroupExtendedTypes )
&& ( separateDepthStencilLayouts == rhs.separateDepthStencilLayouts )
&& ( hostQueryReset == rhs.hostQueryReset )
&& ( timelineSemaphore == rhs.timelineSemaphore )
&& ( bufferDeviceAddress == rhs.bufferDeviceAddress )
&& ( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay )
&& ( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice )
&& ( vulkanMemoryModel == rhs.vulkanMemoryModel )
&& ( vulkanMemoryModelDeviceScope == rhs.vulkanMemoryModelDeviceScope )
&& ( vulkanMemoryModelAvailabilityVisibilityChains == rhs.vulkanMemoryModelAvailabilityVisibilityChains )
&& ( shaderOutputViewportIndex == rhs.shaderOutputViewportIndex )
&& ( shaderOutputLayer == rhs.shaderOutputLayer )
&& ( subgroupBroadcastDynamicId == rhs.subgroupBroadcastDynamicId );
#endif
}
bool operator!=( PhysicalDeviceVulkan12Features const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan12Features;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge = {};
VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount = {};
VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess = {};
VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess = {};
VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8 = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16 = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderInt8 = {};
VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing = {};
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind = {};
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind = {};
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind = {};
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind = {};
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind = {};
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind = {};
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending = {};
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound = {};
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount = {};
VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray = {};
VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax = {};
VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout = {};
VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer = {};
VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes = {};
VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts = {};
VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset = {};
VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore = {};
VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress = {};
VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay = {};
VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice = {};
VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel = {};
VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope = {};
VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer = {};
VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan12Features>
{
using Type = PhysicalDeviceVulkan12Features;
};
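// Illustrative usage sketch, not part of the generated API: a typical pattern is to
// query which Vulkan 1.2 features the implementation supports, then request a subset
// of them at device creation by chaining this struct into vk::DeviceCreateInfo::pNext.
// `physicalDevice` and `queueCreateInfo` below are assumed to exist already; error
// handling is omitted.
//
//   // Query supported Vulkan 1.2 features via a structure chain.
//   auto featureChain =
//     physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2, vk::PhysicalDeviceVulkan12Features>();
//   vk::PhysicalDeviceVulkan12Features const & supported12 =
//     featureChain.get<vk::PhysicalDeviceVulkan12Features>();
//
//   // Request only features that were reported as supported.
//   vk::PhysicalDeviceVulkan12Features enabled12;
//   enabled12.setTimelineSemaphore( supported12.timelineSemaphore )
//            .setDescriptorIndexing( supported12.descriptorIndexing );
//
//   vk::DeviceCreateInfo deviceCreateInfo;
//   deviceCreateInfo.setQueueCreateInfos( queueCreateInfo ).setPNext( &enabled12 );
//   vk::Device device = physicalDevice.createDevice( deviceCreateInfo );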
// wrapper struct for struct VkPhysicalDeviceVulkan12Properties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVulkan12Properties.html
struct PhysicalDeviceVulkan12Properties
{
using NativeType = VkPhysicalDeviceVulkan12Properties;
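// allowDuplicate == false: this struct may appear at most once in a vk::StructureChain.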
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan12Properties;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Properties(VULKAN_HPP_NAMESPACE::DriverId driverID_ = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary, std::array<char,VK_MAX_DRIVER_NAME_SIZE> const & driverName_ = {}, std::array<char,VK_MAX_DRIVER_INFO_SIZE> const & driverInfo_ = {}, VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion_ = {}, VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64_ = {}, uint32_t maxUpdateAfterBindDescriptorsInAllPools_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindSamplers_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments_ = {}, uint32_t maxPerStageUpdateAfterBindResources_ = {}, uint32_t maxDescriptorSetUpdateAfterBindSamplers_ = {}, uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers_ = {}, uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ = {}, uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers_ = {}, uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ = {}, uint32_t maxDescriptorSetUpdateAfterBindSampledImages_ = {}, uint32_t maxDescriptorSetUpdateAfterBindStorageImages_ = {}, uint32_t maxDescriptorSetUpdateAfterBindInputAttachments_ = {}, VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes_ = {}, VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes_ = {}, VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone_ = {}, VULKAN_HPP_NAMESPACE::Bool32 independentResolve_ = {}, VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats_ = {}, 
VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping_ = {}, uint64_t maxTimelineSemaphoreValueDifference_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferIntegerColorSampleCounts_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, driverID{ driverID_ }, driverName{ driverName_ }, driverInfo{ driverInfo_ }, conformanceVersion{ conformanceVersion_ }, denormBehaviorIndependence{ denormBehaviorIndependence_ }, roundingModeIndependence{ roundingModeIndependence_ }, shaderSignedZeroInfNanPreserveFloat16{ shaderSignedZeroInfNanPreserveFloat16_ }, shaderSignedZeroInfNanPreserveFloat32{ shaderSignedZeroInfNanPreserveFloat32_ }, shaderSignedZeroInfNanPreserveFloat64{ shaderSignedZeroInfNanPreserveFloat64_ }, shaderDenormPreserveFloat16{ shaderDenormPreserveFloat16_ }, shaderDenormPreserveFloat32{ shaderDenormPreserveFloat32_ }, shaderDenormPreserveFloat64{ shaderDenormPreserveFloat64_ }, shaderDenormFlushToZeroFloat16{ shaderDenormFlushToZeroFloat16_ }, shaderDenormFlushToZeroFloat32{ shaderDenormFlushToZeroFloat32_ }, shaderDenormFlushToZeroFloat64{ shaderDenormFlushToZeroFloat64_ }, shaderRoundingModeRTEFloat16{ shaderRoundingModeRTEFloat16_ }, shaderRoundingModeRTEFloat32{ shaderRoundingModeRTEFloat32_ }, shaderRoundingModeRTEFloat64{ shaderRoundingModeRTEFloat64_ }, shaderRoundingModeRTZFloat16{ shaderRoundingModeRTZFloat16_ }, shaderRoundingModeRTZFloat32{ shaderRoundingModeRTZFloat32_ }, shaderRoundingModeRTZFloat64{ shaderRoundingModeRTZFloat64_ }, maxUpdateAfterBindDescriptorsInAllPools{ maxUpdateAfterBindDescriptorsInAllPools_ }, shaderUniformBufferArrayNonUniformIndexingNative{ shaderUniformBufferArrayNonUniformIndexingNative_ }, shaderSampledImageArrayNonUniformIndexingNative{ shaderSampledImageArrayNonUniformIndexingNative_ }, shaderStorageBufferArrayNonUniformIndexingNative{ shaderStorageBufferArrayNonUniformIndexingNative_ }, shaderStorageImageArrayNonUniformIndexingNative{ shaderStorageImageArrayNonUniformIndexingNative_ }, shaderInputAttachmentArrayNonUniformIndexingNative{ shaderInputAttachmentArrayNonUniformIndexingNative_ }, robustBufferAccessUpdateAfterBind{ robustBufferAccessUpdateAfterBind_ }, quadDivergentImplicitLod{ quadDivergentImplicitLod_ }, maxPerStageDescriptorUpdateAfterBindSamplers{ maxPerStageDescriptorUpdateAfterBindSamplers_ }, maxPerStageDescriptorUpdateAfterBindUniformBuffers{ maxPerStageDescriptorUpdateAfterBindUniformBuffers_ }, maxPerStageDescriptorUpdateAfterBindStorageBuffers{ maxPerStageDescriptorUpdateAfterBindStorageBuffers_ }, maxPerStageDescriptorUpdateAfterBindSampledImages{ maxPerStageDescriptorUpdateAfterBindSampledImages_ }, maxPerStageDescriptorUpdateAfterBindStorageImages{ maxPerStageDescriptorUpdateAfterBindStorageImages_ }, maxPerStageDescriptorUpdateAfterBindInputAttachments{ maxPerStageDescriptorUpdateAfterBindInputAttachments_ }, maxPerStageUpdateAfterBindResources{ maxPerStageUpdateAfterBindResources_ }, maxDescriptorSetUpdateAfterBindSamplers{ maxDescriptorSetUpdateAfterBindSamplers_ }, maxDescriptorSetUpdateAfterBindUniformBuffers{ maxDescriptorSetUpdateAfterBindUniformBuffers_ }, maxDescriptorSetUpdateAfterBindUniformBuffersDynamic{ maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ }, maxDescriptorSetUpdateAfterBindStorageBuffers{ maxDescriptorSetUpdateAfterBindStorageBuffers_ }, maxDescriptorSetUpdateAfterBindStorageBuffersDynamic{ maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ }, maxDescriptorSetUpdateAfterBindSampledImages{ maxDescriptorSetUpdateAfterBindSampledImages_ }, maxDescriptorSetUpdateAfterBindStorageImages{ maxDescriptorSetUpdateAfterBindStorageImages_ }, maxDescriptorSetUpdateAfterBindInputAttachments{ maxDescriptorSetUpdateAfterBindInputAttachments_ }, supportedDepthResolveModes{ supportedDepthResolveModes_ }, 
supportedStencilResolveModes{ supportedStencilResolveModes_ }, independentResolveNone{ independentResolveNone_ }, independentResolve{ independentResolve_ }, filterMinmaxSingleComponentFormats{ filterMinmaxSingleComponentFormats_ }, filterMinmaxImageComponentMapping{ filterMinmaxImageComponentMapping_ }, maxTimelineSemaphoreValueDifference{ maxTimelineSemaphoreValueDifference_ }, framebufferIntegerColorSampleCounts{ framebufferIntegerColorSampleCounts_ }
{}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Properties( PhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceVulkan12Properties( VkPhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceVulkan12Properties( *reinterpret_cast<PhysicalDeviceVulkan12Properties const *>( &rhs ) )
{}
PhysicalDeviceVulkan12Properties & operator=( PhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceVulkan12Properties & operator=( VkPhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Properties const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceVulkan12Properties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceVulkan12Properties*>( this );
}
operator VkPhysicalDeviceVulkan12Properties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceVulkan12Properties*>( this );
}
operator VkPhysicalDeviceVulkan12Properties const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceVulkan12Properties*>( this );
}
operator VkPhysicalDeviceVulkan12Properties *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceVulkan12Properties*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DriverId const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_NAME_SIZE> const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_INFO_SIZE> const &, VULKAN_HPP_NAMESPACE::ConformanceVersion const &, VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence const &, VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ResolveModeFlags const &, VULKAN_HPP_NAMESPACE::ResolveModeFlags const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint64_t const &, VULKAN_HPP_NAMESPACE::SampleCountFlags const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, driverID, driverName, driverInfo, conformanceVersion, denormBehaviorIndependence, roundingModeIndependence, shaderSignedZeroInfNanPreserveFloat16, shaderSignedZeroInfNanPreserveFloat32, shaderSignedZeroInfNanPreserveFloat64, shaderDenormPreserveFloat16, shaderDenormPreserveFloat32, shaderDenormPreserveFloat64, shaderDenormFlushToZeroFloat16, shaderDenormFlushToZeroFloat32, shaderDenormFlushToZeroFloat64, shaderRoundingModeRTEFloat16, shaderRoundingModeRTEFloat32, shaderRoundingModeRTEFloat64, shaderRoundingModeRTZFloat16, shaderRoundingModeRTZFloat32, shaderRoundingModeRTZFloat64, maxUpdateAfterBindDescriptorsInAllPools, shaderUniformBufferArrayNonUniformIndexingNative, shaderSampledImageArrayNonUniformIndexingNative, shaderStorageBufferArrayNonUniformIndexingNative, shaderStorageImageArrayNonUniformIndexingNative, shaderInputAttachmentArrayNonUniformIndexingNative, robustBufferAccessUpdateAfterBind, quadDivergentImplicitLod, maxPerStageDescriptorUpdateAfterBindSamplers, maxPerStageDescriptorUpdateAfterBindUniformBuffers, maxPerStageDescriptorUpdateAfterBindStorageBuffers, maxPerStageDescriptorUpdateAfterBindSampledImages, maxPerStageDescriptorUpdateAfterBindStorageImages, maxPerStageDescriptorUpdateAfterBindInputAttachments, maxPerStageUpdateAfterBindResources, maxDescriptorSetUpdateAfterBindSamplers, maxDescriptorSetUpdateAfterBindUniformBuffers, maxDescriptorSetUpdateAfterBindUniformBuffersDynamic, maxDescriptorSetUpdateAfterBindStorageBuffers, maxDescriptorSetUpdateAfterBindStorageBuffersDynamic, maxDescriptorSetUpdateAfterBindSampledImages, maxDescriptorSetUpdateAfterBindStorageImages, maxDescriptorSetUpdateAfterBindInputAttachments, supportedDepthResolveModes, supportedStencilResolveModes, independentResolveNone, independentResolve, filterMinmaxSingleComponentFormats, filterMinmaxImageComponentMapping, maxTimelineSemaphoreValueDifference, framebufferIntegerColorSampleCounts );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
std::strong_ordering operator<=>( PhysicalDeviceVulkan12Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
if ( auto cmp = driverID <=> rhs.driverID; cmp != 0 ) return cmp;
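// driverName and driverInfo are fixed-size char arrays holding NUL-terminated strings,
// so they are ordered with strcmp rather than element-wise <=>.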
if ( auto cmp = strcmp( driverName, rhs.driverName ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
if ( auto cmp = strcmp( driverInfo, rhs.driverInfo ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
if ( auto cmp = conformanceVersion <=> rhs.conformanceVersion; cmp != 0 ) return cmp;
if ( auto cmp = denormBehaviorIndependence <=> rhs.denormBehaviorIndependence; cmp != 0 ) return cmp;
if ( auto cmp = roundingModeIndependence <=> rhs.roundingModeIndependence; cmp != 0 ) return cmp;
if ( auto cmp = shaderSignedZeroInfNanPreserveFloat16 <=> rhs.shaderSignedZeroInfNanPreserveFloat16; cmp != 0 ) return cmp;
if ( auto cmp = shaderSignedZeroInfNanPreserveFloat32 <=> rhs.shaderSignedZeroInfNanPreserveFloat32; cmp != 0 ) return cmp;
if ( auto cmp = shaderSignedZeroInfNanPreserveFloat64 <=> rhs.shaderSignedZeroInfNanPreserveFloat64; cmp != 0 ) return cmp;
if ( auto cmp = shaderDenormPreserveFloat16 <=> rhs.shaderDenormPreserveFloat16; cmp != 0 ) return cmp;
if ( auto cmp = shaderDenormPreserveFloat32 <=> rhs.shaderDenormPreserveFloat32; cmp != 0 ) return cmp;
if ( auto cmp = shaderDenormPreserveFloat64 <=> rhs.shaderDenormPreserveFloat64; cmp != 0 ) return cmp;
if ( auto cmp = shaderDenormFlushToZeroFloat16 <=> rhs.shaderDenormFlushToZeroFloat16; cmp != 0 ) return cmp;
if ( auto cmp = shaderDenormFlushToZeroFloat32 <=> rhs.shaderDenormFlushToZeroFloat32; cmp != 0 ) return cmp;
if ( auto cmp = shaderDenormFlushToZeroFloat64 <=> rhs.shaderDenormFlushToZeroFloat64; cmp != 0 ) return cmp;
if ( auto cmp = shaderRoundingModeRTEFloat16 <=> rhs.shaderRoundingModeRTEFloat16; cmp != 0 ) return cmp;
if ( auto cmp = shaderRoundingModeRTEFloat32 <=> rhs.shaderRoundingModeRTEFloat32; cmp != 0 ) return cmp;
if ( auto cmp = shaderRoundingModeRTEFloat64 <=> rhs.shaderRoundingModeRTEFloat64; cmp != 0 ) return cmp;
if ( auto cmp = shaderRoundingModeRTZFloat16 <=> rhs.shaderRoundingModeRTZFloat16; cmp != 0 ) return cmp;
if ( auto cmp = shaderRoundingModeRTZFloat32 <=> rhs.shaderRoundingModeRTZFloat32; cmp != 0 ) return cmp;
if ( auto cmp = shaderRoundingModeRTZFloat64 <=> rhs.shaderRoundingModeRTZFloat64; cmp != 0 ) return cmp;
if ( auto cmp = maxUpdateAfterBindDescriptorsInAllPools <=> rhs.maxUpdateAfterBindDescriptorsInAllPools; cmp != 0 ) return cmp;
if ( auto cmp = shaderUniformBufferArrayNonUniformIndexingNative <=> rhs.shaderUniformBufferArrayNonUniformIndexingNative; cmp != 0 ) return cmp;
if ( auto cmp = shaderSampledImageArrayNonUniformIndexingNative <=> rhs.shaderSampledImageArrayNonUniformIndexingNative; cmp != 0 ) return cmp;
if ( auto cmp = shaderStorageBufferArrayNonUniformIndexingNative <=> rhs.shaderStorageBufferArrayNonUniformIndexingNative; cmp != 0 ) return cmp;
if ( auto cmp = shaderStorageImageArrayNonUniformIndexingNative <=> rhs.shaderStorageImageArrayNonUniformIndexingNative; cmp != 0 ) return cmp;
if ( auto cmp = shaderInputAttachmentArrayNonUniformIndexingNative <=> rhs.shaderInputAttachmentArrayNonUniformIndexingNative; cmp != 0 ) return cmp;
if ( auto cmp = robustBufferAccessUpdateAfterBind <=> rhs.robustBufferAccessUpdateAfterBind; cmp != 0 ) return cmp;
if ( auto cmp = quadDivergentImplicitLod <=> rhs.quadDivergentImplicitLod; cmp != 0 ) return cmp;
if ( auto cmp = maxPerStageDescriptorUpdateAfterBindSamplers <=> rhs.maxPerStageDescriptorUpdateAfterBindSamplers; cmp != 0 ) return cmp;
if ( auto cmp = maxPerStageDescriptorUpdateAfterBindUniformBuffers <=> rhs.maxPerStageDescriptorUpdateAfterBindUniformBuffers; cmp != 0 ) return cmp;
if ( auto cmp = maxPerStageDescriptorUpdateAfterBindStorageBuffers <=> rhs.maxPerStageDescriptorUpdateAfterBindStorageBuffers; cmp != 0 ) return cmp;
if ( auto cmp = maxPerStageDescriptorUpdateAfterBindSampledImages <=> rhs.maxPerStageDescriptorUpdateAfterBindSampledImages; cmp != 0 ) return cmp;
if ( auto cmp = maxPerStageDescriptorUpdateAfterBindStorageImages <=> rhs.maxPerStageDescriptorUpdateAfterBindStorageImages; cmp != 0 ) return cmp;
if ( auto cmp = maxPerStageDescriptorUpdateAfterBindInputAttachments <=> rhs.maxPerStageDescriptorUpdateAfterBindInputAttachments; cmp != 0 ) return cmp;
if ( auto cmp = maxPerStageUpdateAfterBindResources <=> rhs.maxPerStageUpdateAfterBindResources; cmp != 0 ) return cmp;
if ( auto cmp = maxDescriptorSetUpdateAfterBindSamplers <=> rhs.maxDescriptorSetUpdateAfterBindSamplers; cmp != 0 ) return cmp;
if ( auto cmp = maxDescriptorSetUpdateAfterBindUniformBuffers <=> rhs.maxDescriptorSetUpdateAfterBindUniformBuffers; cmp != 0 ) return cmp;
if ( auto cmp = maxDescriptorSetUpdateAfterBindUniformBuffersDynamic <=> rhs.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic; cmp != 0 ) return cmp;
if ( auto cmp = maxDescriptorSetUpdateAfterBindStorageBuffers <=> rhs.maxDescriptorSetUpdateAfterBindStorageBuffers; cmp != 0 ) return cmp;
if ( auto cmp = maxDescriptorSetUpdateAfterBindStorageBuffersDynamic <=> rhs.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic; cmp != 0 ) return cmp;
if ( auto cmp = maxDescriptorSetUpdateAfterBindSampledImages <=> rhs.maxDescriptorSetUpdateAfterBindSampledImages; cmp != 0 ) return cmp;
if ( auto cmp = maxDescriptorSetUpdateAfterBindStorageImages <=> rhs.maxDescriptorSetUpdateAfterBindStorageImages; cmp != 0 ) return cmp;
if ( auto cmp = maxDescriptorSetUpdateAfterBindInputAttachments <=> rhs.maxDescriptorSetUpdateAfterBindInputAttachments; cmp != 0 ) return cmp;
if ( auto cmp = supportedDepthResolveModes <=> rhs.supportedDepthResolveModes; cmp != 0 ) return cmp;
if ( auto cmp = supportedStencilResolveModes <=> rhs.supportedStencilResolveModes; cmp != 0 ) return cmp;
if ( auto cmp = independentResolveNone <=> rhs.independentResolveNone; cmp != 0 ) return cmp;
if ( auto cmp = independentResolve <=> rhs.independentResolve; cmp != 0 ) return cmp;
if ( auto cmp = filterMinmaxSingleComponentFormats <=> rhs.filterMinmaxSingleComponentFormats; cmp != 0 ) return cmp;
if ( auto cmp = filterMinmaxImageComponentMapping <=> rhs.filterMinmaxImageComponentMapping; cmp != 0 ) return cmp;
if ( auto cmp = maxTimelineSemaphoreValueDifference <=> rhs.maxTimelineSemaphoreValueDifference; cmp != 0 ) return cmp;
if ( auto cmp = framebufferIntegerColorSampleCounts <=> rhs.framebufferIntegerColorSampleCounts; cmp != 0 ) return cmp;
return std::strong_ordering::equivalent;
}
#endif
bool operator==( PhysicalDeviceVulkan12Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( driverID == rhs.driverID )
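// the char-array members compare by string content up to the terminating NUL,
// not by all VK_MAX_DRIVER_NAME_SIZE / VK_MAX_DRIVER_INFO_SIZE bytes.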
&& ( strcmp( driverName, rhs.driverName ) == 0 )
&& ( strcmp( driverInfo, rhs.driverInfo ) == 0 )
&& ( conformanceVersion == rhs.conformanceVersion )
&& ( denormBehaviorIndependence == rhs.denormBehaviorIndependence )
&& ( roundingModeIndependence == rhs.roundingModeIndependence )
&& ( shaderSignedZeroInfNanPreserveFloat16 == rhs.shaderSignedZeroInfNanPreserveFloat16 )
&& ( shaderSignedZeroInfNanPreserveFloat32 == rhs.shaderSignedZeroInfNanPreserveFloat32 )
&& ( shaderSignedZeroInfNanPreserveFloat64 == rhs.shaderSignedZeroInfNanPreserveFloat64 )
&& ( shaderDenormPreserveFloat16 == rhs.shaderDenormPreserveFloat16 )
&& ( shaderDenormPreserveFloat32 == rhs.shaderDenormPreserveFloat32 )
&& ( shaderDenormPreserveFloat64 == rhs.shaderDenormPreserveFloat64 )
&& ( shaderDenormFlushToZeroFloat16 == rhs.shaderDenormFlushToZeroFloat16 )
&& ( shaderDenormFlushToZeroFloat32 == rhs.shaderDenormFlushToZeroFloat32 )
&& ( shaderDenormFlushToZeroFloat64 == rhs.shaderDenormFlushToZeroFloat64 )
&& ( shaderRoundingModeRTEFloat16 == rhs.shaderRoundingModeRTEFloat16 )
&& ( shaderRoundingModeRTEFloat32 == rhs.shaderRoundingModeRTEFloat32 )
&& ( shaderRoundingModeRTEFloat64 == rhs.shaderRoundingModeRTEFloat64 )
&& ( shaderRoundingModeRTZFloat16 == rhs.shaderRoundingModeRTZFloat16 )
&& ( shaderRoundingModeRTZFloat32 == rhs.shaderRoundingModeRTZFloat32 )
&& ( shaderRoundingModeRTZFloat64 == rhs.shaderRoundingModeRTZFloat64 )
&& ( maxUpdateAfterBindDescriptorsInAllPools == rhs.maxUpdateAfterBindDescriptorsInAllPools )
&& ( shaderUniformBufferArrayNonUniformIndexingNative == rhs.shaderUniformBufferArrayNonUniformIndexingNative )
&& ( shaderSampledImageArrayNonUniformIndexingNative == rhs.shaderSampledImageArrayNonUniformIndexingNative )
&& ( shaderStorageBufferArrayNonUniformIndexingNative == rhs.shaderStorageBufferArrayNonUniformIndexingNative )
&& ( shaderStorageImageArrayNonUniformIndexingNative == rhs.shaderStorageImageArrayNonUniformIndexingNative )
&& ( shaderInputAttachmentArrayNonUniformIndexingNative == rhs.shaderInputAttachmentArrayNonUniformIndexingNative )
&& ( robustBufferAccessUpdateAfterBind == rhs.robustBufferAccessUpdateAfterBind )
&& ( quadDivergentImplicitLod == rhs.quadDivergentImplicitLod )
&& ( maxPerStageDescriptorUpdateAfterBindSamplers == rhs.maxPerStageDescriptorUpdateAfterBindSamplers )
&& ( maxPerStageDescriptorUpdateAfterBindUniformBuffers == rhs.maxPerStageDescriptorUpdateAfterBindUniformBuffers )
&& ( maxPerStageDescriptorUpdateAfterBindStorageBuffers == rhs.maxPerStageDescriptorUpdateAfterBindStorageBuffers )
&& ( maxPerStageDescriptorUpdateAfterBindSampledImages == rhs.maxPerStageDescriptorUpdateAfterBindSampledImages )
&& ( maxPerStageDescriptorUpdateAfterBindStorageImages == rhs.maxPerStageDescriptorUpdateAfterBindStorageImages )
&& ( maxPerStageDescriptorUpdateAfterBindInputAttachments == rhs.maxPerStageDescriptorUpdateAfterBindInputAttachments )
&& ( maxPerStageUpdateAfterBindResources == rhs.maxPerStageUpdateAfterBindResources )
&& ( maxDescriptorSetUpdateAfterBindSamplers == rhs.maxDescriptorSetUpdateAfterBindSamplers )
&& ( maxDescriptorSetUpdateAfterBindUniformBuffers == rhs.maxDescriptorSetUpdateAfterBindUniformBuffers )
&& ( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic )
&& ( maxDescriptorSetUpdateAfterBindStorageBuffers == rhs.maxDescriptorSetUpdateAfterBindStorageBuffers )
&& ( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic )
&& ( maxDescriptorSetUpdateAfterBindSampledImages == rhs.maxDescriptorSetUpdateAfterBindSampledImages )
&& ( maxDescriptorSetUpdateAfterBindStorageImages == rhs.maxDescriptorSetUpdateAfterBindStorageImages )
&& ( maxDescriptorSetUpdateAfterBindInputAttachments == rhs.maxDescriptorSetUpdateAfterBindInputAttachments )
&& ( supportedDepthResolveModes == rhs.supportedDepthResolveModes )
&& ( supportedStencilResolveModes == rhs.supportedStencilResolveModes )
&& ( independentResolveNone == rhs.independentResolveNone )
&& ( independentResolve == rhs.independentResolve )
&& ( filterMinmaxSingleComponentFormats == rhs.filterMinmaxSingleComponentFormats )
&& ( filterMinmaxImageComponentMapping == rhs.filterMinmaxImageComponentMapping )
&& ( maxTimelineSemaphoreValueDifference == rhs.maxTimelineSemaphoreValueDifference )
&& ( framebufferIntegerColorSampleCounts == rhs.framebufferIntegerColorSampleCounts );
}
bool operator!=( PhysicalDeviceVulkan12Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan12Properties;
void * pNext = {};
VULKAN_HPP_NAMESPACE::DriverId driverID = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary;
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_NAME_SIZE> driverName = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_INFO_SIZE> driverInfo = {};
VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion = {};
VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly;
VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly;
VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16 = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32 = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64 = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16 = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32 = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64 = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16 = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32 = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64 = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16 = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32 = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64 = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16 = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32 = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64 = {};
uint32_t maxUpdateAfterBindDescriptorsInAllPools = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative = {};
VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind = {};
VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod = {};
uint32_t maxPerStageDescriptorUpdateAfterBindSamplers = {};
uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers = {};
uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers = {};
uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages = {};
uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages = {};
uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments = {};
uint32_t maxPerStageUpdateAfterBindResources = {};
uint32_t maxDescriptorSetUpdateAfterBindSamplers = {};
uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers = {};
uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = {};
uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers = {};
uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = {};
uint32_t maxDescriptorSetUpdateAfterBindSampledImages = {};
uint32_t maxDescriptorSetUpdateAfterBindStorageImages = {};
uint32_t maxDescriptorSetUpdateAfterBindInputAttachments = {};
VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes = {};
VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes = {};
VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone = {};
VULKAN_HPP_NAMESPACE::Bool32 independentResolve = {};
VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats = {};
VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping = {};
uint64_t maxTimelineSemaphoreValueDifference = {};
VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferIntegerColorSampleCounts = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan12Properties>
{
using Type = PhysicalDeviceVulkan12Properties;
};
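// Illustrative usage sketch, not part of the generated API: this struct is
// returned-only (note the absence of setters), so it is filled by chaining it into a
// vk::PhysicalDeviceProperties2 query. `physicalDevice` below is assumed to be a
// valid vk::PhysicalDevice; printing requires <iostream>.
//
//   auto propertyChain =
//     physicalDevice.getProperties2<vk::PhysicalDeviceProperties2, vk::PhysicalDeviceVulkan12Properties>();
//   vk::PhysicalDeviceVulkan12Properties const & props12 =
//     propertyChain.get<vk::PhysicalDeviceVulkan12Properties>();
//   std::cout << "driver: " << props12.driverName.data()
//             << ", max timeline value difference: " << props12.maxTimelineSemaphoreValueDifference << '\n';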
// wrapper struct for struct VkPhysicalDeviceVulkan13Features, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVulkan13Features.html
struct PhysicalDeviceVulkan13Features
{
using NativeType = VkPhysicalDeviceVulkan13Features;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan13Features;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan13Features(VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ = {}, VULKAN_HPP_NAMESPACE::Bool32 privateData_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation_ = {}, VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ = {}, VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ = {}, VULKAN_HPP_NAMESPACE::Bool32 synchronization2_ = {}, VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory_ = {}, VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct_ = {}, VULKAN_HPP_NAMESPACE::Bool32 maintenance4_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, robustImageAccess{ robustImageAccess_ }, inlineUniformBlock{ inlineUniformBlock_ }, descriptorBindingInlineUniformBlockUpdateAfterBind{ descriptorBindingInlineUniformBlockUpdateAfterBind_ }, pipelineCreationCacheControl{ pipelineCreationCacheControl_ }, privateData{ privateData_ }, shaderDemoteToHelperInvocation{ shaderDemoteToHelperInvocation_ }, shaderTerminateInvocation{ shaderTerminateInvocation_ }, subgroupSizeControl{ subgroupSizeControl_ }, computeFullSubgroups{ computeFullSubgroups_ }, synchronization2{ synchronization2_ }, textureCompressionASTC_HDR{ textureCompressionASTC_HDR_ }, shaderZeroInitializeWorkgroupMemory{ shaderZeroInitializeWorkgroupMemory_ }, dynamicRendering{ dynamicRendering_ }, shaderIntegerDotProduct{ shaderIntegerDotProduct_ }, maintenance4{ maintenance4_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan13Features( PhysicalDeviceVulkan13Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceVulkan13Features( VkPhysicalDeviceVulkan13Features const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceVulkan13Features( *reinterpret_cast<PhysicalDeviceVulkan13Features const *>( &rhs ) )
{}
PhysicalDeviceVulkan13Features & operator=( PhysicalDeviceVulkan13Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceVulkan13Features & operator=( VkPhysicalDeviceVulkan13Features const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Features const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setRobustImageAccess( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess_ ) VULKAN_HPP_NOEXCEPT
{
robustImageAccess = robustImageAccess_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setInlineUniformBlock( VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ ) VULKAN_HPP_NOEXCEPT
{
inlineUniformBlock = inlineUniformBlock_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setDescriptorBindingInlineUniformBlockUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBindingInlineUniformBlockUpdateAfterBind = descriptorBindingInlineUniformBlockUpdateAfterBind_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setPipelineCreationCacheControl( VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ ) VULKAN_HPP_NOEXCEPT
{
pipelineCreationCacheControl = pipelineCreationCacheControl_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setPrivateData( VULKAN_HPP_NAMESPACE::Bool32 privateData_ ) VULKAN_HPP_NOEXCEPT
{
privateData = privateData_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setShaderDemoteToHelperInvocation( VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ ) VULKAN_HPP_NOEXCEPT
{
shaderDemoteToHelperInvocation = shaderDemoteToHelperInvocation_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setShaderTerminateInvocation( VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation_ ) VULKAN_HPP_NOEXCEPT
{
shaderTerminateInvocation = shaderTerminateInvocation_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setSubgroupSizeControl( VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ ) VULKAN_HPP_NOEXCEPT
{
subgroupSizeControl = subgroupSizeControl_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setComputeFullSubgroups( VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ ) VULKAN_HPP_NOEXCEPT
{
computeFullSubgroups = computeFullSubgroups_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setSynchronization2( VULKAN_HPP_NAMESPACE::Bool32 synchronization2_ ) VULKAN_HPP_NOEXCEPT
{
synchronization2 = synchronization2_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setTextureCompressionASTC_HDR( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ ) VULKAN_HPP_NOEXCEPT
{
textureCompressionASTC_HDR = textureCompressionASTC_HDR_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setShaderZeroInitializeWorkgroupMemory( VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory_ ) VULKAN_HPP_NOEXCEPT
{
shaderZeroInitializeWorkgroupMemory = shaderZeroInitializeWorkgroupMemory_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setDynamicRendering( VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering_ ) VULKAN_HPP_NOEXCEPT
{
dynamicRendering = dynamicRendering_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setShaderIntegerDotProduct( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct_ ) VULKAN_HPP_NOEXCEPT
{
shaderIntegerDotProduct = shaderIntegerDotProduct_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setMaintenance4( VULKAN_HPP_NAMESPACE::Bool32 maintenance4_ ) VULKAN_HPP_NOEXCEPT
{
maintenance4 = maintenance4_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceVulkan13Features const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceVulkan13Features*>( this );
}
operator VkPhysicalDeviceVulkan13Features &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceVulkan13Features*>( this );
}
operator VkPhysicalDeviceVulkan13Features const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceVulkan13Features*>( this );
}
operator VkPhysicalDeviceVulkan13Features *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceVulkan13Features*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, robustImageAccess, inlineUniformBlock, descriptorBindingInlineUniformBlockUpdateAfterBind, pipelineCreationCacheControl, privateData, shaderDemoteToHelperInvocation, shaderTerminateInvocation, subgroupSizeControl, computeFullSubgroups, synchronization2, textureCompressionASTC_HDR, shaderZeroInitializeWorkgroupMemory, dynamicRendering, shaderIntegerDotProduct, maintenance4 );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceVulkan13Features const & ) const = default;
#else
bool operator==( PhysicalDeviceVulkan13Features const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( robustImageAccess == rhs.robustImageAccess )
&& ( inlineUniformBlock == rhs.inlineUniformBlock )
&& ( descriptorBindingInlineUniformBlockUpdateAfterBind == rhs.descriptorBindingInlineUniformBlockUpdateAfterBind )
&& ( pipelineCreationCacheControl == rhs.pipelineCreationCacheControl )
&& ( privateData == rhs.privateData )
&& ( shaderDemoteToHelperInvocation == rhs.shaderDemoteToHelperInvocation )
&& ( shaderTerminateInvocation == rhs.shaderTerminateInvocation )
&& ( subgroupSizeControl == rhs.subgroupSizeControl )
&& ( computeFullSubgroups == rhs.computeFullSubgroups )
&& ( synchronization2 == rhs.synchronization2 )
&& ( textureCompressionASTC_HDR == rhs.textureCompressionASTC_HDR )
&& ( shaderZeroInitializeWorkgroupMemory == rhs.shaderZeroInitializeWorkgroupMemory )
&& ( dynamicRendering == rhs.dynamicRendering )
&& ( shaderIntegerDotProduct == rhs.shaderIntegerDotProduct )
&& ( maintenance4 == rhs.maintenance4 );
#endif
}
bool operator!=( PhysicalDeviceVulkan13Features const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan13Features;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess = {};
VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock = {};
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind = {};
VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl = {};
VULKAN_HPP_NAMESPACE::Bool32 privateData = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation = {};
VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl = {};
VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups = {};
VULKAN_HPP_NAMESPACE::Bool32 synchronization2 = {};
VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory = {};
VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct = {};
VULKAN_HPP_NAMESPACE::Bool32 maintenance4 = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan13Features>
{
using Type = PhysicalDeviceVulkan13Features;
};
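// Illustrative usage sketch, not part of the generated API: the per-version feature
// structs compose through pNext, so Vulkan 1.2 and 1.3 features can be enabled in one
// chain handed to device creation. All variables below are hypothetical.
//
//   vk::PhysicalDeviceVulkan13Features enabled13;
//   enabled13.setDynamicRendering( VK_TRUE ).setSynchronization2( VK_TRUE );
//
//   vk::PhysicalDeviceVulkan12Features enabled12;
//   enabled12.setBufferDeviceAddress( VK_TRUE ).setPNext( &enabled13 );  // 1.2 links to 1.3
//
//   vk::DeviceCreateInfo deviceCreateInfo;
//   deviceCreateInfo.setPNext( &enabled12 );  // head of the feature chain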
// wrapper struct for struct VkPhysicalDeviceVulkan13Properties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVulkan13Properties.html
struct PhysicalDeviceVulkan13Properties
{
using NativeType = VkPhysicalDeviceVulkan13Properties;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan13Properties;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan13Properties(uint32_t minSubgroupSize_ = {}, uint32_t maxSubgroupSize_ = {}, uint32_t maxComputeWorkgroupSubgroups_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages_ = {}, uint32_t maxInlineUniformBlockSize_ = {}, uint32_t maxPerStageDescriptorInlineUniformBlocks_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ = {}, uint32_t maxDescriptorSetInlineUniformBlocks_ = {}, uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ = {}, uint32_t maxInlineUniformTotalSize_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment_ = {},
VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes_ = {}, VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize maxBufferSize_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, minSubgroupSize{ minSubgroupSize_ }, maxSubgroupSize{ maxSubgroupSize_ }, maxComputeWorkgroupSubgroups{ maxComputeWorkgroupSubgroups_ }, requiredSubgroupSizeStages{ requiredSubgroupSizeStages_ }, maxInlineUniformBlockSize{ maxInlineUniformBlockSize_ }, maxPerStageDescriptorInlineUniformBlocks{ maxPerStageDescriptorInlineUniformBlocks_ }, maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks{ maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ }, maxDescriptorSetInlineUniformBlocks{ maxDescriptorSetInlineUniformBlocks_ }, maxDescriptorSetUpdateAfterBindInlineUniformBlocks{ maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ }, maxInlineUniformTotalSize{ maxInlineUniformTotalSize_ }, integerDotProduct8BitUnsignedAccelerated{ integerDotProduct8BitUnsignedAccelerated_ }, integerDotProduct8BitSignedAccelerated{ integerDotProduct8BitSignedAccelerated_ }, integerDotProduct8BitMixedSignednessAccelerated{ integerDotProduct8BitMixedSignednessAccelerated_ }, integerDotProduct4x8BitPackedUnsignedAccelerated{ integerDotProduct4x8BitPackedUnsignedAccelerated_ }, integerDotProduct4x8BitPackedSignedAccelerated{ integerDotProduct4x8BitPackedSignedAccelerated_ }, integerDotProduct4x8BitPackedMixedSignednessAccelerated{ integerDotProduct4x8BitPackedMixedSignednessAccelerated_ }, integerDotProduct16BitUnsignedAccelerated{ integerDotProduct16BitUnsignedAccelerated_ }, integerDotProduct16BitSignedAccelerated{ integerDotProduct16BitSignedAccelerated_ }, integerDotProduct16BitMixedSignednessAccelerated{ integerDotProduct16BitMixedSignednessAccelerated_ }, integerDotProduct32BitUnsignedAccelerated{ integerDotProduct32BitUnsignedAccelerated_ }, integerDotProduct32BitSignedAccelerated{ integerDotProduct32BitSignedAccelerated_ }, integerDotProduct32BitMixedSignednessAccelerated{ integerDotProduct32BitMixedSignednessAccelerated_ }, integerDotProduct64BitUnsignedAccelerated{ integerDotProduct64BitUnsignedAccelerated_ }, integerDotProduct64BitSignedAccelerated{ integerDotProduct64BitSignedAccelerated_ }, integerDotProduct64BitMixedSignednessAccelerated{ integerDotProduct64BitMixedSignednessAccelerated_ }, integerDotProductAccumulatingSaturating8BitUnsignedAccelerated{ integerDotProductAccumulatingSaturating8BitUnsignedAccelerated_ }, integerDotProductAccumulatingSaturating8BitSignedAccelerated{ integerDotProductAccumulatingSaturating8BitSignedAccelerated_ }, integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated{ integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated_ }, integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated{ integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated_ }, integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated{ integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated_ }, integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated{ integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated_ }, integerDotProductAccumulatingSaturating16BitUnsignedAccelerated{ integerDotProductAccumulatingSaturating16BitUnsignedAccelerated_ }, integerDotProductAccumulatingSaturating16BitSignedAccelerated{ integerDotProductAccumulatingSaturating16BitSignedAccelerated_ }, integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated{ integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated_ }, integerDotProductAccumulatingSaturating32BitUnsignedAccelerated{ integerDotProductAccumulatingSaturating32BitUnsignedAccelerated_ }, 
integerDotProductAccumulatingSaturating32BitSignedAccelerated{ integerDotProductAccumulatingSaturating32BitSignedAccelerated_ }, integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated{ integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated_ }, integerDotProductAccumulatingSaturating64BitUnsignedAccelerated{ integerDotProductAccumulatingSaturating64BitUnsignedAccelerated_ }, integerDotProductAccumulatingSaturating64BitSignedAccelerated{ integerDotProductAccumulatingSaturating64BitSignedAccelerated_ }, integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated{ integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated_ }, storageTexelBufferOffsetAlignmentBytes{ storageTexelBufferOffsetAlignmentBytes_ }, storageTexelBufferOffsetSingleTexelAlignment{ storageTexelBufferOffsetSingleTexelAlignment_ }, uniformTexelBufferOffsetAlignmentBytes{ uniformTexelBufferOffsetAlignmentBytes_ }, uniformTexelBufferOffsetSingleTexelAlignment{ uniformTexelBufferOffsetSingleTexelAlignment_ }, maxBufferSize{ maxBufferSize_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan13Properties( PhysicalDeviceVulkan13Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceVulkan13Properties( VkPhysicalDeviceVulkan13Properties const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceVulkan13Properties( *reinterpret_cast<PhysicalDeviceVulkan13Properties const *>( &rhs ) )
{}
PhysicalDeviceVulkan13Properties & operator=( PhysicalDeviceVulkan13Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceVulkan13Properties & operator=( VkPhysicalDeviceVulkan13Properties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Properties const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceVulkan13Properties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceVulkan13Properties*>( this );
}
operator VkPhysicalDeviceVulkan13Properties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceVulkan13Properties*>( this );
}
operator VkPhysicalDeviceVulkan13Properties const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceVulkan13Properties*>( this );
}
operator VkPhysicalDeviceVulkan13Properties *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceVulkan13Properties*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ShaderStageFlags const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, minSubgroupSize, maxSubgroupSize, maxComputeWorkgroupSubgroups, requiredSubgroupSizeStages, maxInlineUniformBlockSize, maxPerStageDescriptorInlineUniformBlocks, maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks, maxDescriptorSetInlineUniformBlocks, maxDescriptorSetUpdateAfterBindInlineUniformBlocks, maxInlineUniformTotalSize, integerDotProduct8BitUnsignedAccelerated, integerDotProduct8BitSignedAccelerated, integerDotProduct8BitMixedSignednessAccelerated, integerDotProduct4x8BitPackedUnsignedAccelerated, integerDotProduct4x8BitPackedSignedAccelerated, integerDotProduct4x8BitPackedMixedSignednessAccelerated, integerDotProduct16BitUnsignedAccelerated, integerDotProduct16BitSignedAccelerated, integerDotProduct16BitMixedSignednessAccelerated, integerDotProduct32BitUnsignedAccelerated, integerDotProduct32BitSignedAccelerated, integerDotProduct32BitMixedSignednessAccelerated, integerDotProduct64BitUnsignedAccelerated, integerDotProduct64BitSignedAccelerated, integerDotProduct64BitMixedSignednessAccelerated, integerDotProductAccumulatingSaturating8BitUnsignedAccelerated, integerDotProductAccumulatingSaturating8BitSignedAccelerated, integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated, integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated, integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated, integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated, integerDotProductAccumulatingSaturating16BitUnsignedAccelerated, integerDotProductAccumulatingSaturating16BitSignedAccelerated, integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated, integerDotProductAccumulatingSaturating32BitUnsignedAccelerated, integerDotProductAccumulatingSaturating32BitSignedAccelerated, integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated, integerDotProductAccumulatingSaturating64BitUnsignedAccelerated, integerDotProductAccumulatingSaturating64BitSignedAccelerated, integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated, storageTexelBufferOffsetAlignmentBytes, storageTexelBufferOffsetSingleTexelAlignment, uniformTexelBufferOffsetAlignmentBytes, uniformTexelBufferOffsetSingleTexelAlignment, maxBufferSize );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceVulkan13Properties const & ) const = default;
#else
bool operator==( PhysicalDeviceVulkan13Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( minSubgroupSize == rhs.minSubgroupSize )
&& ( maxSubgroupSize == rhs.maxSubgroupSize )
&& ( maxComputeWorkgroupSubgroups == rhs.maxComputeWorkgroupSubgroups )
&& ( requiredSubgroupSizeStages == rhs.requiredSubgroupSizeStages )
&& ( maxInlineUniformBlockSize == rhs.maxInlineUniformBlockSize )
&& ( maxPerStageDescriptorInlineUniformBlocks == rhs.maxPerStageDescriptorInlineUniformBlocks )
&& ( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks == rhs.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks )
&& ( maxDescriptorSetInlineUniformBlocks == rhs.maxDescriptorSetInlineUniformBlocks )
&& ( maxDescriptorSetUpdateAfterBindInlineUniformBlocks == rhs.maxDescriptorSetUpdateAfterBindInlineUniformBlocks )
&& ( maxInlineUniformTotalSize == rhs.maxInlineUniformTotalSize )
&& ( integerDotProduct8BitUnsignedAccelerated == rhs.integerDotProduct8BitUnsignedAccelerated )
&& ( integerDotProduct8BitSignedAccelerated == rhs.integerDotProduct8BitSignedAccelerated )
&& ( integerDotProduct8BitMixedSignednessAccelerated == rhs.integerDotProduct8BitMixedSignednessAccelerated )
&& ( integerDotProduct4x8BitPackedUnsignedAccelerated == rhs.integerDotProduct4x8BitPackedUnsignedAccelerated )
&& ( integerDotProduct4x8BitPackedSignedAccelerated == rhs.integerDotProduct4x8BitPackedSignedAccelerated )
&& ( integerDotProduct4x8BitPackedMixedSignednessAccelerated == rhs.integerDotProduct4x8BitPackedMixedSignednessAccelerated )
&& ( integerDotProduct16BitUnsignedAccelerated == rhs.integerDotProduct16BitUnsignedAccelerated )
&& ( integerDotProduct16BitSignedAccelerated == rhs.integerDotProduct16BitSignedAccelerated )
&& ( integerDotProduct16BitMixedSignednessAccelerated == rhs.integerDotProduct16BitMixedSignednessAccelerated )
&& ( integerDotProduct32BitUnsignedAccelerated == rhs.integerDotProduct32BitUnsignedAccelerated )
&& ( integerDotProduct32BitSignedAccelerated == rhs.integerDotProduct32BitSignedAccelerated )
&& ( integerDotProduct32BitMixedSignednessAccelerated == rhs.integerDotProduct32BitMixedSignednessAccelerated )
&& ( integerDotProduct64BitUnsignedAccelerated == rhs.integerDotProduct64BitUnsignedAccelerated )
&& ( integerDotProduct64BitSignedAccelerated == rhs.integerDotProduct64BitSignedAccelerated )
&& ( integerDotProduct64BitMixedSignednessAccelerated == rhs.integerDotProduct64BitMixedSignednessAccelerated )
&& ( integerDotProductAccumulatingSaturating8BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating8BitUnsignedAccelerated )
&& ( integerDotProductAccumulatingSaturating8BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating8BitSignedAccelerated )
&& ( integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated == rhs.integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated )
&& ( integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated )
&& ( integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated == rhs.integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated )
&& ( integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated == rhs.integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated )
&& ( integerDotProductAccumulatingSaturating16BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating16BitUnsignedAccelerated )
&& ( integerDotProductAccumulatingSaturating16BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating16BitSignedAccelerated )
&& ( integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated == rhs.integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated )
&& ( integerDotProductAccumulatingSaturating32BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating32BitUnsignedAccelerated )
&& ( integerDotProductAccumulatingSaturating32BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating32BitSignedAccelerated )
&& ( integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated == rhs.integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated )
&& ( integerDotProductAccumulatingSaturating64BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating64BitUnsignedAccelerated )
&& ( integerDotProductAccumulatingSaturating64BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating64BitSignedAccelerated )
&& ( integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated == rhs.integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated )
&& ( storageTexelBufferOffsetAlignmentBytes == rhs.storageTexelBufferOffsetAlignmentBytes )
&& ( storageTexelBufferOffsetSingleTexelAlignment == rhs.storageTexelBufferOffsetSingleTexelAlignment )
&& ( uniformTexelBufferOffsetAlignmentBytes == rhs.uniformTexelBufferOffsetAlignmentBytes )
&& ( uniformTexelBufferOffsetSingleTexelAlignment == rhs.uniformTexelBufferOffsetSingleTexelAlignment )
&& ( maxBufferSize == rhs.maxBufferSize );
#endif
}
bool operator!=( PhysicalDeviceVulkan13Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan13Properties;
void * pNext = {};
uint32_t minSubgroupSize = {};
uint32_t maxSubgroupSize = {};
uint32_t maxComputeWorkgroupSubgroups = {};
VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages = {};
uint32_t maxInlineUniformBlockSize = {};
uint32_t maxPerStageDescriptorInlineUniformBlocks = {};
uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks = {};
uint32_t maxDescriptorSetInlineUniformBlocks = {};
uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks = {};
uint32_t maxInlineUniformTotalSize = {};
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitUnsignedAccelerated = {};
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitSignedAccelerated = {};
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitMixedSignednessAccelerated = {};
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedUnsignedAccelerated = {};
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedSignedAccelerated = {};
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedMixedSignednessAccelerated = {};
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitUnsignedAccelerated = {};
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitSignedAccelerated = {};
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitMixedSignednessAccelerated = {};
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitUnsignedAccelerated = {};
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitSignedAccelerated = {};
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitMixedSignednessAccelerated = {};
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitUnsignedAccelerated = {};
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitSignedAccelerated = {};
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitMixedSignednessAccelerated = {};
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitUnsignedAccelerated = {};
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitSignedAccelerated = {};
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated = {};
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated = {};
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated = {};
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated = {};
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitUnsignedAccelerated = {};
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitSignedAccelerated = {};
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated = {};
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitUnsignedAccelerated = {};
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitSignedAccelerated = {};
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated = {};
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitUnsignedAccelerated = {};
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitSignedAccelerated = {};
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated = {};
VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes = {};
VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment = {};
VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes = {};
VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment = {};
VULKAN_HPP_NAMESPACE::DeviceSize maxBufferSize = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan13Properties>
{
using Type = PhysicalDeviceVulkan13Properties;
};
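// Usage sketch (illustrative, not part of the generated header): PhysicalDeviceVulkan13Properties
// is a query-only struct (hence no setters are generated); it is read by chaining it behind
// PhysicalDeviceProperties2. The variable `physicalDevice` is hypothetical.
//
// auto chain = physicalDevice.getProperties2<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2,
// VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Properties>();
// auto const & props13 = chain.get<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Properties>();
// uint32_t inlineBlockLimit = props13.maxInlineUniformBlockSize; // at least 256 per the Vulkan 1.3 required limits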
// wrapper struct for struct VkPhysicalDeviceVulkan14Features, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVulkan14Features.html
struct PhysicalDeviceVulkan14Features
{
using NativeType = VkPhysicalDeviceVulkan14Features;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan14Features;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan14Features(VULKAN_HPP_NAMESPACE::Bool32 globalPriorityQuery_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupRotate_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupRotateClustered_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderFloatControls2_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderExpectAssume_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rectangularLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 smoothLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor_ = {}, VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_ = {}, VULKAN_HPP_NAMESPACE::Bool32 dynamicRenderingLocalRead_ = {}, VULKAN_HPP_NAMESPACE::Bool32 maintenance5_ = {}, VULKAN_HPP_NAMESPACE::Bool32 maintenance6_ = {}, VULKAN_HPP_NAMESPACE::Bool32 pipelineProtectedAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 pipelineRobustness_ = {}, VULKAN_HPP_NAMESPACE::Bool32 hostImageCopy_ = {}, VULKAN_HPP_NAMESPACE::Bool32 pushDescriptor_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, globalPriorityQuery{ globalPriorityQuery_ }, shaderSubgroupRotate{ shaderSubgroupRotate_ }, shaderSubgroupRotateClustered{ shaderSubgroupRotateClustered_ }, shaderFloatControls2{ shaderFloatControls2_ }, shaderExpectAssume{ shaderExpectAssume_ }, rectangularLines{ rectangularLines_ }, bresenhamLines{ bresenhamLines_ }, smoothLines{ smoothLines_ }, stippledRectangularLines{ stippledRectangularLines_ }, stippledBresenhamLines{ stippledBresenhamLines_ }, stippledSmoothLines{ stippledSmoothLines_ }, vertexAttributeInstanceRateDivisor{ vertexAttributeInstanceRateDivisor_ }, vertexAttributeInstanceRateZeroDivisor{ vertexAttributeInstanceRateZeroDivisor_ }, indexTypeUint8{ indexTypeUint8_ }, dynamicRenderingLocalRead{ dynamicRenderingLocalRead_ }, maintenance5{ maintenance5_ }, maintenance6{ maintenance6_ }, pipelineProtectedAccess{ pipelineProtectedAccess_ }, pipelineRobustness{ pipelineRobustness_ }, hostImageCopy{ hostImageCopy_ }, pushDescriptor{ pushDescriptor_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan14Features( PhysicalDeviceVulkan14Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceVulkan14Features( VkPhysicalDeviceVulkan14Features const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceVulkan14Features( *reinterpret_cast<PhysicalDeviceVulkan14Features const *>( &rhs ) )
{}
PhysicalDeviceVulkan14Features & operator=( PhysicalDeviceVulkan14Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceVulkan14Features & operator=( VkPhysicalDeviceVulkan14Features const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan14Features const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setGlobalPriorityQuery( VULKAN_HPP_NAMESPACE::Bool32 globalPriorityQuery_ ) VULKAN_HPP_NOEXCEPT
{
globalPriorityQuery = globalPriorityQuery_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setShaderSubgroupRotate( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupRotate_ ) VULKAN_HPP_NOEXCEPT
{
shaderSubgroupRotate = shaderSubgroupRotate_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setShaderSubgroupRotateClustered( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupRotateClustered_ ) VULKAN_HPP_NOEXCEPT
{
shaderSubgroupRotateClustered = shaderSubgroupRotateClustered_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setShaderFloatControls2( VULKAN_HPP_NAMESPACE::Bool32 shaderFloatControls2_ ) VULKAN_HPP_NOEXCEPT
{
shaderFloatControls2 = shaderFloatControls2_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setShaderExpectAssume( VULKAN_HPP_NAMESPACE::Bool32 shaderExpectAssume_ ) VULKAN_HPP_NOEXCEPT
{
shaderExpectAssume = shaderExpectAssume_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setRectangularLines( VULKAN_HPP_NAMESPACE::Bool32 rectangularLines_ ) VULKAN_HPP_NOEXCEPT
{
rectangularLines = rectangularLines_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setBresenhamLines( VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_ ) VULKAN_HPP_NOEXCEPT
{
bresenhamLines = bresenhamLines_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setSmoothLines( VULKAN_HPP_NAMESPACE::Bool32 smoothLines_ ) VULKAN_HPP_NOEXCEPT
{
smoothLines = smoothLines_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setStippledRectangularLines( VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines_ ) VULKAN_HPP_NOEXCEPT
{
stippledRectangularLines = stippledRectangularLines_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setStippledBresenhamLines( VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines_ ) VULKAN_HPP_NOEXCEPT
{
stippledBresenhamLines = stippledBresenhamLines_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setStippledSmoothLines( VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines_ ) VULKAN_HPP_NOEXCEPT
{
stippledSmoothLines = stippledSmoothLines_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setVertexAttributeInstanceRateDivisor( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor_ ) VULKAN_HPP_NOEXCEPT
{
vertexAttributeInstanceRateDivisor = vertexAttributeInstanceRateDivisor_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setVertexAttributeInstanceRateZeroDivisor( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor_ ) VULKAN_HPP_NOEXCEPT
{
vertexAttributeInstanceRateZeroDivisor = vertexAttributeInstanceRateZeroDivisor_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setIndexTypeUint8( VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_ ) VULKAN_HPP_NOEXCEPT
{
indexTypeUint8 = indexTypeUint8_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setDynamicRenderingLocalRead( VULKAN_HPP_NAMESPACE::Bool32 dynamicRenderingLocalRead_ ) VULKAN_HPP_NOEXCEPT
{
dynamicRenderingLocalRead = dynamicRenderingLocalRead_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setMaintenance5( VULKAN_HPP_NAMESPACE::Bool32 maintenance5_ ) VULKAN_HPP_NOEXCEPT
{
maintenance5 = maintenance5_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setMaintenance6( VULKAN_HPP_NAMESPACE::Bool32 maintenance6_ ) VULKAN_HPP_NOEXCEPT
{
maintenance6 = maintenance6_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setPipelineProtectedAccess( VULKAN_HPP_NAMESPACE::Bool32 pipelineProtectedAccess_ ) VULKAN_HPP_NOEXCEPT
{
pipelineProtectedAccess = pipelineProtectedAccess_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setPipelineRobustness( VULKAN_HPP_NAMESPACE::Bool32 pipelineRobustness_ ) VULKAN_HPP_NOEXCEPT
{
pipelineRobustness = pipelineRobustness_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setHostImageCopy( VULKAN_HPP_NAMESPACE::Bool32 hostImageCopy_ ) VULKAN_HPP_NOEXCEPT
{
hostImageCopy = hostImageCopy_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setPushDescriptor( VULKAN_HPP_NAMESPACE::Bool32 pushDescriptor_ ) VULKAN_HPP_NOEXCEPT
{
pushDescriptor = pushDescriptor_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceVulkan14Features const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceVulkan14Features*>( this );
}
operator VkPhysicalDeviceVulkan14Features &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceVulkan14Features*>( this );
}
operator VkPhysicalDeviceVulkan14Features const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceVulkan14Features*>( this );
}
operator VkPhysicalDeviceVulkan14Features *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceVulkan14Features*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, globalPriorityQuery, shaderSubgroupRotate, shaderSubgroupRotateClustered, shaderFloatControls2, shaderExpectAssume, rectangularLines, bresenhamLines, smoothLines, stippledRectangularLines, stippledBresenhamLines, stippledSmoothLines, vertexAttributeInstanceRateDivisor, vertexAttributeInstanceRateZeroDivisor, indexTypeUint8, dynamicRenderingLocalRead, maintenance5, maintenance6, pipelineProtectedAccess, pipelineRobustness, hostImageCopy, pushDescriptor );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceVulkan14Features const & ) const = default;
#else
bool operator==( PhysicalDeviceVulkan14Features const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( globalPriorityQuery == rhs.globalPriorityQuery )
&& ( shaderSubgroupRotate == rhs.shaderSubgroupRotate )
&& ( shaderSubgroupRotateClustered == rhs.shaderSubgroupRotateClustered )
&& ( shaderFloatControls2 == rhs.shaderFloatControls2 )
&& ( shaderExpectAssume == rhs.shaderExpectAssume )
&& ( rectangularLines == rhs.rectangularLines )
&& ( bresenhamLines == rhs.bresenhamLines )
&& ( smoothLines == rhs.smoothLines )
&& ( stippledRectangularLines == rhs.stippledRectangularLines )
&& ( stippledBresenhamLines == rhs.stippledBresenhamLines )
&& ( stippledSmoothLines == rhs.stippledSmoothLines )
&& ( vertexAttributeInstanceRateDivisor == rhs.vertexAttributeInstanceRateDivisor )
&& ( vertexAttributeInstanceRateZeroDivisor == rhs.vertexAttributeInstanceRateZeroDivisor )
&& ( indexTypeUint8 == rhs.indexTypeUint8 )
&& ( dynamicRenderingLocalRead == rhs.dynamicRenderingLocalRead )
&& ( maintenance5 == rhs.maintenance5 )
&& ( maintenance6 == rhs.maintenance6 )
&& ( pipelineProtectedAccess == rhs.pipelineProtectedAccess )
&& ( pipelineRobustness == rhs.pipelineRobustness )
&& ( hostImageCopy == rhs.hostImageCopy )
&& ( pushDescriptor == rhs.pushDescriptor );
#endif
}
bool operator!=( PhysicalDeviceVulkan14Features const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan14Features;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 globalPriorityQuery = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupRotate = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupRotateClustered = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderFloatControls2 = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderExpectAssume = {};
VULKAN_HPP_NAMESPACE::Bool32 rectangularLines = {};
VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines = {};
VULKAN_HPP_NAMESPACE::Bool32 smoothLines = {};
VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines = {};
VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines = {};
VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines = {};
VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor = {};
VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor = {};
VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8 = {};
VULKAN_HPP_NAMESPACE::Bool32 dynamicRenderingLocalRead = {};
VULKAN_HPP_NAMESPACE::Bool32 maintenance5 = {};
VULKAN_HPP_NAMESPACE::Bool32 maintenance6 = {};
VULKAN_HPP_NAMESPACE::Bool32 pipelineProtectedAccess = {};
VULKAN_HPP_NAMESPACE::Bool32 pipelineRobustness = {};
VULKAN_HPP_NAMESPACE::Bool32 hostImageCopy = {};
VULKAN_HPP_NAMESPACE::Bool32 pushDescriptor = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan14Features>
{
using Type = PhysicalDeviceVulkan14Features;
};
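// Usage sketch (illustrative, not part of the generated header): to enable Vulkan 1.4 features
// at device creation, fill this struct (the fluent setters above can be chained) and link it
// into DeviceCreateInfo::pNext. All variables are hypothetical.
//
// VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan14Features vulkan14Features;
// vulkan14Features.setPushDescriptor( VK_TRUE ).setMaintenance5( VK_TRUE ).setHostImageCopy( VK_TRUE );
// VULKAN_HPP_NAMESPACE::DeviceCreateInfo deviceCreateInfo;
// deviceCreateInfo.setPNext( &vulkan14Features );
// // ... add queue create infos, then call physicalDevice.createDevice( deviceCreateInfo )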
// wrapper struct for struct VkPhysicalDeviceVulkan14Properties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVulkan14Properties.html
struct PhysicalDeviceVulkan14Properties
{
using NativeType = VkPhysicalDeviceVulkan14Properties;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan14Properties;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Properties(uint32_t lineSubPixelPrecisionBits_ = {}, uint32_t maxVertexAttribDivisor_ = {}, VULKAN_HPP_NAMESPACE::Bool32 supportsNonZeroFirstInstance_ = {}, uint32_t maxPushDescriptors_ = {}, VULKAN_HPP_NAMESPACE::Bool32 dynamicRenderingLocalReadDepthStencilAttachments_ = {}, VULKAN_HPP_NAMESPACE::Bool32 dynamicRenderingLocalReadMultisampledAttachments_ = {}, VULKAN_HPP_NAMESPACE::Bool32 earlyFragmentMultisampleCoverageAfterSampleCounting_ = {}, VULKAN_HPP_NAMESPACE::Bool32 earlyFragmentSampleMaskTestBeforeSampleCounting_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthStencilSwizzleOneSupport_ = {}, VULKAN_HPP_NAMESPACE::Bool32 polygonModePointSize_ = {}, VULKAN_HPP_NAMESPACE::Bool32 nonStrictSinglePixelWideLinesUseParallelogram_ = {}, VULKAN_HPP_NAMESPACE::Bool32 nonStrictWideLinesUseParallelogram_ = {}, VULKAN_HPP_NAMESPACE::Bool32 blockTexelViewCompatibleMultipleLayers_ = {}, uint32_t maxCombinedImageSamplerDescriptorCount_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateClampCombinerInputs_ = {}, VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior defaultRobustnessStorageBuffers_ = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault, VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior defaultRobustnessUniformBuffers_ = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault, VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior defaultRobustnessVertexInputs_ = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault, VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehavior defaultRobustnessImages_ = VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehavior::eDeviceDefault, uint32_t copySrcLayoutCount_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout * pCopySrcLayouts_ = {}, uint32_t copyDstLayoutCount_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout * pCopyDstLayouts_ = {}, std::array<uint8_t,VK_UUID_SIZE> const & optimalTilingLayoutUUID_ = {}, VULKAN_HPP_NAMESPACE::Bool32 identicalMemoryTypeRequirements_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, lineSubPixelPrecisionBits{ lineSubPixelPrecisionBits_ }, maxVertexAttribDivisor{ maxVertexAttribDivisor_ }, supportsNonZeroFirstInstance{ supportsNonZeroFirstInstance_ }, maxPushDescriptors{ maxPushDescriptors_ }, dynamicRenderingLocalReadDepthStencilAttachments{ dynamicRenderingLocalReadDepthStencilAttachments_ }, dynamicRenderingLocalReadMultisampledAttachments{ dynamicRenderingLocalReadMultisampledAttachments_ }, earlyFragmentMultisampleCoverageAfterSampleCounting{ earlyFragmentMultisampleCoverageAfterSampleCounting_ }, earlyFragmentSampleMaskTestBeforeSampleCounting{ earlyFragmentSampleMaskTestBeforeSampleCounting_ }, depthStencilSwizzleOneSupport{ depthStencilSwizzleOneSupport_ }, polygonModePointSize{ polygonModePointSize_ }, nonStrictSinglePixelWideLinesUseParallelogram{ nonStrictSinglePixelWideLinesUseParallelogram_ }, nonStrictWideLinesUseParallelogram{ nonStrictWideLinesUseParallelogram_ }, blockTexelViewCompatibleMultipleLayers{ blockTexelViewCompatibleMultipleLayers_ }, maxCombinedImageSamplerDescriptorCount{ maxCombinedImageSamplerDescriptorCount_ }, fragmentShadingRateClampCombinerInputs{ fragmentShadingRateClampCombinerInputs_ }, defaultRobustnessStorageBuffers{ defaultRobustnessStorageBuffers_ }, defaultRobustnessUniformBuffers{ defaultRobustnessUniformBuffers_ }, defaultRobustnessVertexInputs{ defaultRobustnessVertexInputs_ }, defaultRobustnessImages{ defaultRobustnessImages_ }, copySrcLayoutCount{ copySrcLayoutCount_ }, pCopySrcLayouts{ pCopySrcLayouts_ }, copyDstLayoutCount{ copyDstLayoutCount_ }, pCopyDstLayouts{ pCopyDstLayouts_ }, optimalTilingLayoutUUID{ optimalTilingLayoutUUID_ }, identicalMemoryTypeRequirements{ identicalMemoryTypeRequirements_ }
{}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Properties( PhysicalDeviceVulkan14Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceVulkan14Properties( VkPhysicalDeviceVulkan14Properties const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceVulkan14Properties( *reinterpret_cast<PhysicalDeviceVulkan14Properties const *>( &rhs ) )
{}
PhysicalDeviceVulkan14Properties & operator=( PhysicalDeviceVulkan14Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceVulkan14Properties & operator=( VkPhysicalDeviceVulkan14Properties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan14Properties const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceVulkan14Properties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceVulkan14Properties*>( this );
}
operator VkPhysicalDeviceVulkan14Properties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceVulkan14Properties*>( this );
}
operator VkPhysicalDeviceVulkan14Properties const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceVulkan14Properties*>( this );
}
operator VkPhysicalDeviceVulkan14Properties *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceVulkan14Properties*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior const &, VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior const &, VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior const &, VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehavior const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ImageLayout * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ImageLayout * const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, lineSubPixelPrecisionBits, maxVertexAttribDivisor, supportsNonZeroFirstInstance, maxPushDescriptors, dynamicRenderingLocalReadDepthStencilAttachments, dynamicRenderingLocalReadMultisampledAttachments, earlyFragmentMultisampleCoverageAfterSampleCounting, earlyFragmentSampleMaskTestBeforeSampleCounting, depthStencilSwizzleOneSupport, polygonModePointSize, nonStrictSinglePixelWideLinesUseParallelogram, nonStrictWideLinesUseParallelogram, blockTexelViewCompatibleMultipleLayers, maxCombinedImageSamplerDescriptorCount, fragmentShadingRateClampCombinerInputs, defaultRobustnessStorageBuffers, defaultRobustnessUniformBuffers, defaultRobustnessVertexInputs, defaultRobustnessImages, copySrcLayoutCount, pCopySrcLayouts, copyDstLayoutCount, pCopyDstLayouts, optimalTilingLayoutUUID, identicalMemoryTypeRequirements );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceVulkan14Properties const & ) const = default;
#else
bool operator==( PhysicalDeviceVulkan14Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( lineSubPixelPrecisionBits == rhs.lineSubPixelPrecisionBits )
&& ( maxVertexAttribDivisor == rhs.maxVertexAttribDivisor )
&& ( supportsNonZeroFirstInstance == rhs.supportsNonZeroFirstInstance )
&& ( maxPushDescriptors == rhs.maxPushDescriptors )
&& ( dynamicRenderingLocalReadDepthStencilAttachments == rhs.dynamicRenderingLocalReadDepthStencilAttachments )
&& ( dynamicRenderingLocalReadMultisampledAttachments == rhs.dynamicRenderingLocalReadMultisampledAttachments )
&& ( earlyFragmentMultisampleCoverageAfterSampleCounting == rhs.earlyFragmentMultisampleCoverageAfterSampleCounting )
&& ( earlyFragmentSampleMaskTestBeforeSampleCounting == rhs.earlyFragmentSampleMaskTestBeforeSampleCounting )
&& ( depthStencilSwizzleOneSupport == rhs.depthStencilSwizzleOneSupport )
&& ( polygonModePointSize == rhs.polygonModePointSize )
&& ( nonStrictSinglePixelWideLinesUseParallelogram == rhs.nonStrictSinglePixelWideLinesUseParallelogram )
&& ( nonStrictWideLinesUseParallelogram == rhs.nonStrictWideLinesUseParallelogram )
&& ( blockTexelViewCompatibleMultipleLayers == rhs.blockTexelViewCompatibleMultipleLayers )
&& ( maxCombinedImageSamplerDescriptorCount == rhs.maxCombinedImageSamplerDescriptorCount )
&& ( fragmentShadingRateClampCombinerInputs == rhs.fragmentShadingRateClampCombinerInputs )
&& ( defaultRobustnessStorageBuffers == rhs.defaultRobustnessStorageBuffers )
&& ( defaultRobustnessUniformBuffers == rhs.defaultRobustnessUniformBuffers )
&& ( defaultRobustnessVertexInputs == rhs.defaultRobustnessVertexInputs )
&& ( defaultRobustnessImages == rhs.defaultRobustnessImages )
&& ( copySrcLayoutCount == rhs.copySrcLayoutCount )
&& ( pCopySrcLayouts == rhs.pCopySrcLayouts )
&& ( copyDstLayoutCount == rhs.copyDstLayoutCount )
&& ( pCopyDstLayouts == rhs.pCopyDstLayouts )
&& ( optimalTilingLayoutUUID == rhs.optimalTilingLayoutUUID )
&& ( identicalMemoryTypeRequirements == rhs.identicalMemoryTypeRequirements );
#endif
}
bool operator!=( PhysicalDeviceVulkan14Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan14Properties;
void * pNext = {};
uint32_t lineSubPixelPrecisionBits = {};
uint32_t maxVertexAttribDivisor = {};
VULKAN_HPP_NAMESPACE::Bool32 supportsNonZeroFirstInstance = {};
uint32_t maxPushDescriptors = {};
VULKAN_HPP_NAMESPACE::Bool32 dynamicRenderingLocalReadDepthStencilAttachments = {};
VULKAN_HPP_NAMESPACE::Bool32 dynamicRenderingLocalReadMultisampledAttachments = {};
VULKAN_HPP_NAMESPACE::Bool32 earlyFragmentMultisampleCoverageAfterSampleCounting = {};
VULKAN_HPP_NAMESPACE::Bool32 earlyFragmentSampleMaskTestBeforeSampleCounting = {};
VULKAN_HPP_NAMESPACE::Bool32 depthStencilSwizzleOneSupport = {};
VULKAN_HPP_NAMESPACE::Bool32 polygonModePointSize = {};
VULKAN_HPP_NAMESPACE::Bool32 nonStrictSinglePixelWideLinesUseParallelogram = {};
VULKAN_HPP_NAMESPACE::Bool32 nonStrictWideLinesUseParallelogram = {};
VULKAN_HPP_NAMESPACE::Bool32 blockTexelViewCompatibleMultipleLayers = {};
uint32_t maxCombinedImageSamplerDescriptorCount = {};
VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateClampCombinerInputs = {};
VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior defaultRobustnessStorageBuffers = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault;
VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior defaultRobustnessUniformBuffers = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault;
VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior defaultRobustnessVertexInputs = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault;
VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehavior defaultRobustnessImages = VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehavior::eDeviceDefault;
uint32_t copySrcLayoutCount = {};
VULKAN_HPP_NAMESPACE::ImageLayout * pCopySrcLayouts = {};
uint32_t copyDstLayoutCount = {};
VULKAN_HPP_NAMESPACE::ImageLayout * pCopyDstLayouts = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> optimalTilingLayoutUUID = {};
VULKAN_HPP_NAMESPACE::Bool32 identicalMemoryTypeRequirements = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan14Properties>
{
using Type = PhysicalDeviceVulkan14Properties;
};
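// Usage sketch (illustrative, not part of the generated header): unlike most property structs,
// PhysicalDeviceVulkan14Properties contains application-provided output arrays (pCopySrcLayouts /
// pCopyDstLayouts) for the host-image-copy layout lists, so it follows the usual Vulkan two-call
// enumeration pattern. All variables are hypothetical.
//
// VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan14Properties props14; // first call: pCopySrcLayouts == nullptr -> counts only
// VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 props2;
// props2.pNext = &props14;
// physicalDevice.getProperties2( &props2 );
// std::vector<VULKAN_HPP_NAMESPACE::ImageLayout> srcLayouts( props14.copySrcLayoutCount );
// props14.pCopySrcLayouts = srcLayouts.data(); // second call: fills the layout list
// physicalDevice.getProperties2( &props2 );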
// wrapper struct for struct VkPhysicalDeviceVulkanMemoryModelFeatures, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVulkanMemoryModelFeatures.html
struct PhysicalDeviceVulkanMemoryModelFeatures
{
using NativeType = VkPhysicalDeviceVulkanMemoryModelFeatures;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkanMemoryModelFeatures;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkanMemoryModelFeatures(VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, vulkanMemoryModel{ vulkanMemoryModel_ }, vulkanMemoryModelDeviceScope{ vulkanMemoryModelDeviceScope_ }, vulkanMemoryModelAvailabilityVisibilityChains{ vulkanMemoryModelAvailabilityVisibilityChains_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkanMemoryModelFeatures( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceVulkanMemoryModelFeatures( VkPhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceVulkanMemoryModelFeatures( *reinterpret_cast<PhysicalDeviceVulkanMemoryModelFeatures const *>( &rhs ) )
{}
PhysicalDeviceVulkanMemoryModelFeatures & operator=( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceVulkanMemoryModelFeatures & operator=( VkPhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeatures const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkanMemoryModelFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkanMemoryModelFeatures & setVulkanMemoryModel( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ ) VULKAN_HPP_NOEXCEPT
{
vulkanMemoryModel = vulkanMemoryModel_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkanMemoryModelFeatures & setVulkanMemoryModelDeviceScope( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ ) VULKAN_HPP_NOEXCEPT
{
vulkanMemoryModelDeviceScope = vulkanMemoryModelDeviceScope_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkanMemoryModelFeatures & setVulkanMemoryModelAvailabilityVisibilityChains( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ ) VULKAN_HPP_NOEXCEPT
{
vulkanMemoryModelAvailabilityVisibilityChains = vulkanMemoryModelAvailabilityVisibilityChains_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceVulkanMemoryModelFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceVulkanMemoryModelFeatures*>( this );
}
operator VkPhysicalDeviceVulkanMemoryModelFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceVulkanMemoryModelFeatures*>( this );
}
operator VkPhysicalDeviceVulkanMemoryModelFeatures const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceVulkanMemoryModelFeatures*>( this );
}
operator VkPhysicalDeviceVulkanMemoryModelFeatures *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceVulkanMemoryModelFeatures*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, vulkanMemoryModel, vulkanMemoryModelDeviceScope, vulkanMemoryModelAvailabilityVisibilityChains );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceVulkanMemoryModelFeatures const & ) const = default;
#else
bool operator==( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( vulkanMemoryModel == rhs.vulkanMemoryModel )
&& ( vulkanMemoryModelDeviceScope == rhs.vulkanMemoryModelDeviceScope )
&& ( vulkanMemoryModelAvailabilityVisibilityChains == rhs.vulkanMemoryModelAvailabilityVisibilityChains );
#endif
}
bool operator!=( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkanMemoryModelFeatures;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel = {};
VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope = {};
VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceVulkanMemoryModelFeatures>
{
using Type = PhysicalDeviceVulkanMemoryModelFeatures;
};
using PhysicalDeviceVulkanMemoryModelFeaturesKHR = PhysicalDeviceVulkanMemoryModelFeatures;
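// Usage sketch (illustrative, not part of the generated header): the conversion operators above
// let the wrapper be passed wherever the C API expects the native struct, and the KHR alias
// keeps code written against VK_KHR_vulkan_memory_model compiling unchanged. Variables are
// hypothetical.
//
// VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeatures memoryModelFeatures;
// memoryModelFeatures.setVulkanMemoryModel( VK_TRUE ).setVulkanMemoryModelDeviceScope( VK_TRUE );
// VkPhysicalDeviceVulkanMemoryModelFeatures const * native = memoryModelFeatures; // via operator const *()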
// wrapper struct for struct VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR.html
struct PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR
{
using NativeType = VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout_ = {}, VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayoutScalarBlockLayout_ = {}, VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout8BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout16BitAccess_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, workgroupMemoryExplicitLayout{ workgroupMemoryExplicitLayout_ }, workgroupMemoryExplicitLayoutScalarBlockLayout{ workgroupMemoryExplicitLayoutScalarBlockLayout_ }, workgroupMemoryExplicitLayout8BitAccess{ workgroupMemoryExplicitLayout8BitAccess_ }, workgroupMemoryExplicitLayout16BitAccess{ workgroupMemoryExplicitLayout16BitAccess_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR( VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR( *reinterpret_cast<PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const *>( &rhs ) )
{}
PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & operator=( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & operator=( VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & setWorkgroupMemoryExplicitLayout( VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout_ ) VULKAN_HPP_NOEXCEPT
{
workgroupMemoryExplicitLayout = workgroupMemoryExplicitLayout_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & setWorkgroupMemoryExplicitLayoutScalarBlockLayout( VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayoutScalarBlockLayout_ ) VULKAN_HPP_NOEXCEPT
{
workgroupMemoryExplicitLayoutScalarBlockLayout = workgroupMemoryExplicitLayoutScalarBlockLayout_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & setWorkgroupMemoryExplicitLayout8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout8BitAccess_ ) VULKAN_HPP_NOEXCEPT
{
workgroupMemoryExplicitLayout8BitAccess = workgroupMemoryExplicitLayout8BitAccess_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & setWorkgroupMemoryExplicitLayout16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout16BitAccess_ ) VULKAN_HPP_NOEXCEPT
{
workgroupMemoryExplicitLayout16BitAccess = workgroupMemoryExplicitLayout16BitAccess_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR*>( this );
}
operator VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR*>( this );
}
operator VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR*>( this );
}
operator VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, workgroupMemoryExplicitLayout, workgroupMemoryExplicitLayoutScalarBlockLayout, workgroupMemoryExplicitLayout8BitAccess, workgroupMemoryExplicitLayout16BitAccess );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & ) const = default;
#else
bool operator==( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( workgroupMemoryExplicitLayout == rhs.workgroupMemoryExplicitLayout )
&& ( workgroupMemoryExplicitLayoutScalarBlockLayout == rhs.workgroupMemoryExplicitLayoutScalarBlockLayout )
&& ( workgroupMemoryExplicitLayout8BitAccess == rhs.workgroupMemoryExplicitLayout8BitAccess )
&& ( workgroupMemoryExplicitLayout16BitAccess == rhs.workgroupMemoryExplicitLayout16BitAccess );
#endif
}
bool operator!=( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout = {};
VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayoutScalarBlockLayout = {};
VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout8BitAccess = {};
VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout16BitAccess = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR>
{
using Type = PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR;
};
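// Usage sketch (illustrative comment, not part of the generated interface; physicalDevice is an
// assumed, valid vk::PhysicalDevice): the same struct enables the features at device creation when
// chained into vk::DeviceCreateInfo::pNext with the desired members set to VK_TRUE.
//
//   vk::PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR explicitLayoutFeatures;
//   explicitLayoutFeatures.workgroupMemoryExplicitLayout = VK_TRUE;
//   vk::DeviceCreateInfo deviceCreateInfo;              // queues, extensions etc. populated as usual
//   deviceCreateInfo.pNext = &explicitLayoutFeatures;   // feature structs are valid in this chain
//   vk::Device device = physicalDevice.createDevice( deviceCreateInfo );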
// wrapper struct for struct VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT.html
struct PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT
{
using NativeType = VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 ycbcr2plane444Formats_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, ycbcr2plane444Formats{ ycbcr2plane444Formats_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT( VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT( *reinterpret_cast<PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT & operator=( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT & operator=( VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT & setYcbcr2plane444Formats( VULKAN_HPP_NAMESPACE::Bool32 ycbcr2plane444Formats_ ) VULKAN_HPP_NOEXCEPT
{
ycbcr2plane444Formats = ycbcr2plane444Formats_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT*>( this );
}
operator VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT*>( this );
}
operator VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT*>( this );
}
operator VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, ycbcr2plane444Formats );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( ycbcr2plane444Formats == rhs.ycbcr2plane444Formats );
#endif
}
bool operator!=( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 ycbcr2plane444Formats = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT>
{
using Type = PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT;
};
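// Usage sketch (illustrative comment): with the enhanced API, vk::StructureChain builds and
// unpacks the same pNext chain type-safely (physicalDevice is an assumed, valid vk::PhysicalDevice).
//
//   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
//                                            vk::PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT>();
//   bool supported =
//     chain.get<vk::PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT>().ycbcr2plane444Formats == VK_TRUE;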
// wrapper struct for struct VkPhysicalDeviceYcbcrDegammaFeaturesQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceYcbcrDegammaFeaturesQCOM.html
struct PhysicalDeviceYcbcrDegammaFeaturesQCOM
{
using NativeType = VkPhysicalDeviceYcbcrDegammaFeaturesQCOM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceYcbcrDegammaFeaturesQCOM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrDegammaFeaturesQCOM(VULKAN_HPP_NAMESPACE::Bool32 ycbcrDegamma_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, ycbcrDegamma{ ycbcrDegamma_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrDegammaFeaturesQCOM( PhysicalDeviceYcbcrDegammaFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceYcbcrDegammaFeaturesQCOM( VkPhysicalDeviceYcbcrDegammaFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceYcbcrDegammaFeaturesQCOM( *reinterpret_cast<PhysicalDeviceYcbcrDegammaFeaturesQCOM const *>( &rhs ) )
{}
PhysicalDeviceYcbcrDegammaFeaturesQCOM & operator=( PhysicalDeviceYcbcrDegammaFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceYcbcrDegammaFeaturesQCOM & operator=( VkPhysicalDeviceYcbcrDegammaFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrDegammaFeaturesQCOM const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcrDegammaFeaturesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcrDegammaFeaturesQCOM & setYcbcrDegamma( VULKAN_HPP_NAMESPACE::Bool32 ycbcrDegamma_ ) VULKAN_HPP_NOEXCEPT
{
ycbcrDegamma = ycbcrDegamma_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceYcbcrDegammaFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceYcbcrDegammaFeaturesQCOM*>( this );
}
operator VkPhysicalDeviceYcbcrDegammaFeaturesQCOM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceYcbcrDegammaFeaturesQCOM*>( this );
}
operator VkPhysicalDeviceYcbcrDegammaFeaturesQCOM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceYcbcrDegammaFeaturesQCOM*>( this );
}
operator VkPhysicalDeviceYcbcrDegammaFeaturesQCOM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceYcbcrDegammaFeaturesQCOM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, ycbcrDegamma );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceYcbcrDegammaFeaturesQCOM const & ) const = default;
#else
bool operator==( PhysicalDeviceYcbcrDegammaFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( ycbcrDegamma == rhs.ycbcrDegamma );
#endif
}
bool operator!=( PhysicalDeviceYcbcrDegammaFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceYcbcrDegammaFeaturesQCOM;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 ycbcrDegamma = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceYcbcrDegammaFeaturesQCOM>
{
using Type = PhysicalDeviceYcbcrDegammaFeaturesQCOM;
};
// wrapper struct for struct VkPhysicalDeviceYcbcrImageArraysFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceYcbcrImageArraysFeaturesEXT.html
struct PhysicalDeviceYcbcrImageArraysFeaturesEXT
{
using NativeType = VkPhysicalDeviceYcbcrImageArraysFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrImageArraysFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, ycbcrImageArrays{ ycbcrImageArrays_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrImageArraysFeaturesEXT( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceYcbcrImageArraysFeaturesEXT( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceYcbcrImageArraysFeaturesEXT( *reinterpret_cast<PhysicalDeviceYcbcrImageArraysFeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceYcbcrImageArraysFeaturesEXT & operator=( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceYcbcrImageArraysFeaturesEXT & operator=( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcrImageArraysFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcrImageArraysFeaturesEXT & setYcbcrImageArrays( VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays_ ) VULKAN_HPP_NOEXCEPT
{
ycbcrImageArrays = ycbcrImageArrays_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceYcbcrImageArraysFeaturesEXT*>( this );
}
operator VkPhysicalDeviceYcbcrImageArraysFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceYcbcrImageArraysFeaturesEXT*>( this );
}
operator VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceYcbcrImageArraysFeaturesEXT*>( this );
}
operator VkPhysicalDeviceYcbcrImageArraysFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceYcbcrImageArraysFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, ycbcrImageArrays );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( ycbcrImageArrays == rhs.ycbcrImageArrays );
#endif
}
bool operator!=( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT>
{
using Type = PhysicalDeviceYcbcrImageArraysFeaturesEXT;
};
// wrapper struct for struct VkPhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT.html
struct PhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT
{
using NativeType = VkPhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 zeroInitializeDeviceMemory_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, zeroInitializeDeviceMemory{ zeroInitializeDeviceMemory_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT( PhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT( VkPhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT( *reinterpret_cast<PhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT const *>( &rhs ) )
{}
PhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT & operator=( PhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT & operator=( VkPhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT & setZeroInitializeDeviceMemory( VULKAN_HPP_NAMESPACE::Bool32 zeroInitializeDeviceMemory_ ) VULKAN_HPP_NOEXCEPT
{
zeroInitializeDeviceMemory = zeroInitializeDeviceMemory_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT*>( this );
}
operator VkPhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT*>( this );
}
operator VkPhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT*>( this );
}
operator VkPhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, zeroInitializeDeviceMemory );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( zeroInitializeDeviceMemory == rhs.zeroInitializeDeviceMemory );
#endif
}
bool operator!=( PhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 zeroInitializeDeviceMemory = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT>
{
using Type = PhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT;
};
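// Usage sketch (illustrative comment): when setters are enabled, each generated set* member
// returns *this, so a struct can be populated fluently in a single expression.
//
//   auto zeroInitFeatures = vk::PhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT{}
//                             .setZeroInitializeDeviceMemory( VK_TRUE );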
// wrapper struct for struct VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures.html
struct PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures
{
using NativeType = VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceZeroInitializeWorkgroupMemoryFeatures;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures(VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, shaderZeroInitializeWorkgroupMemory{ shaderZeroInitializeWorkgroupMemory_ }
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures( PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures( VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures( *reinterpret_cast<PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const *>( &rhs ) )
{}
PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures & operator=( PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures & operator=( VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures & setShaderZeroInitializeWorkgroupMemory( VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory_ ) VULKAN_HPP_NOEXCEPT
{
shaderZeroInitializeWorkgroupMemory = shaderZeroInitializeWorkgroupMemory_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures*>( this );
}
operator VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures*>( this );
}
operator VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures*>( this );
}
operator VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, shaderZeroInitializeWorkgroupMemory );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & ) const = default;
#else
bool operator==( PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( shaderZeroInitializeWorkgroupMemory == rhs.shaderZeroInitializeWorkgroupMemory );
#endif
}
bool operator!=( PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceZeroInitializeWorkgroupMemoryFeatures;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceZeroInitializeWorkgroupMemoryFeatures>
{
using Type = PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures;
};
using PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR = PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures;
// wrapper struct for struct VkPipelineBinaryKeyKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineBinaryKeyKHR.html
struct PipelineBinaryKeyKHR
{
using NativeType = VkPipelineBinaryKeyKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineBinaryKeyKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PipelineBinaryKeyKHR(uint32_t keySize_ = {}, std::array<uint8_t, VK_MAX_PIPELINE_BINARY_KEY_SIZE_KHR> const & key_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, keySize{ keySize_ }, key{ key_ }
{}
VULKAN_HPP_CONSTEXPR_14 PipelineBinaryKeyKHR( PipelineBinaryKeyKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineBinaryKeyKHR( VkPipelineBinaryKeyKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineBinaryKeyKHR( *reinterpret_cast<PipelineBinaryKeyKHR const *>( &rhs ) )
{}
PipelineBinaryKeyKHR & operator=( PipelineBinaryKeyKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineBinaryKeyKHR & operator=( VkPipelineBinaryKeyKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PipelineBinaryKeyKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineBinaryKeyKHR & setKeySize( uint32_t keySize_ ) VULKAN_HPP_NOEXCEPT
{
keySize = keySize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineBinaryKeyKHR & setKey( std::array<uint8_t, VK_MAX_PIPELINE_BINARY_KEY_SIZE_KHR> key_ ) VULKAN_HPP_NOEXCEPT
{
key = key_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPipelineBinaryKeyKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineBinaryKeyKHR*>( this );
}
operator VkPipelineBinaryKeyKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineBinaryKeyKHR*>( this );
}
operator VkPipelineBinaryKeyKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineBinaryKeyKHR*>( this );
}
operator VkPipelineBinaryKeyKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineBinaryKeyKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_MAX_PIPELINE_BINARY_KEY_SIZE_KHR> const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, keySize, key );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PipelineBinaryKeyKHR const & ) const = default;
#else
bool operator==( PipelineBinaryKeyKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( keySize == rhs.keySize )
&& ( key == rhs.key );
#endif
}
bool operator!=( PipelineBinaryKeyKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineBinaryKeyKHR;
void * pNext = {};
uint32_t keySize = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_MAX_PIPELINE_BINARY_KEY_SIZE_KHR> key = {};
};
template <>
struct CppType<StructureType, StructureType::ePipelineBinaryKeyKHR>
{
using Type = PipelineBinaryKeyKHR;
};
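// Usage sketch (illustrative comment): per VK_KHR_pipeline_binary, only the first keySize bytes of
// key hold valid data, so serializing a key (e.g. for an on-disk cache) should copy just that prefix.
//
//   void storeKey( vk::PipelineBinaryKeyKHR const & binaryKey, std::vector<uint8_t> & out )
//   {
//     out.assign( binaryKey.key.begin(), binaryKey.key.begin() + binaryKey.keySize );
//   }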
// wrapper struct for struct VkPipelineBinaryDataKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineBinaryDataKHR.html
struct PipelineBinaryDataKHR
{
using NativeType = VkPipelineBinaryDataKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineBinaryDataKHR(size_t dataSize_ = {}, void * pData_ = {}) VULKAN_HPP_NOEXCEPT
: dataSize{ dataSize_ }, pData{ pData_ }
{}
VULKAN_HPP_CONSTEXPR PipelineBinaryDataKHR( PipelineBinaryDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineBinaryDataKHR( VkPipelineBinaryDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineBinaryDataKHR( *reinterpret_cast<PipelineBinaryDataKHR const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
template <typename T>
PipelineBinaryDataKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<T> const & data_ )
: dataSize( data_.size() * sizeof(T) ), pData( data_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
PipelineBinaryDataKHR & operator=( PipelineBinaryDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineBinaryDataKHR & operator=( VkPipelineBinaryDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineBinaryDataKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PipelineBinaryDataKHR & setDataSize( size_t dataSize_ ) VULKAN_HPP_NOEXCEPT
{
dataSize = dataSize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineBinaryDataKHR & setPData( void * pData_ ) VULKAN_HPP_NOEXCEPT
{
pData = pData_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
template <typename T>
PipelineBinaryDataKHR & setData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<T> const & data_ ) VULKAN_HPP_NOEXCEPT
{
dataSize = data_.size() * sizeof(T);
pData = data_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPipelineBinaryDataKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineBinaryDataKHR*>( this );
}
operator VkPipelineBinaryDataKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineBinaryDataKHR*>( this );
}
operator VkPipelineBinaryDataKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineBinaryDataKHR*>( this );
}
operator VkPipelineBinaryDataKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineBinaryDataKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<size_t const &, void * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( dataSize, pData );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PipelineBinaryDataKHR const & ) const = default;
#else
bool operator==( PipelineBinaryDataKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( dataSize == rhs.dataSize )
&& ( pData == rhs.pData );
#endif
}
bool operator!=( PipelineBinaryDataKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
size_t dataSize = {};
void * pData = {};
};
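// Usage sketch (illustrative comment): in enhanced mode, setData() derives dataSize and pData from
// a contiguous container in one step, so the byte count cannot drift from the pointer
// (loadCachedBinary is a hypothetical helper returning a std::vector<uint8_t>).
//
//   std::vector<uint8_t> blob = loadCachedBinary();
//   vk::PipelineBinaryDataKHR binaryData;
//   binaryData.setData( blob );   // dataSize = blob.size() * sizeof(uint8_t), pData = blob.data()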
// wrapper struct for struct VkPipelineBinaryKeysAndDataKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineBinaryKeysAndDataKHR.html
struct PipelineBinaryKeysAndDataKHR
{
using NativeType = VkPipelineBinaryKeysAndDataKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PipelineBinaryKeysAndDataKHR(uint32_t binaryCount_ = {}, const VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR * pPipelineBinaryKeys_ = {}, const VULKAN_HPP_NAMESPACE::PipelineBinaryDataKHR * pPipelineBinaryData_ = {}) VULKAN_HPP_NOEXCEPT
: binaryCount{ binaryCount_ }, pPipelineBinaryKeys{ pPipelineBinaryKeys_ }, pPipelineBinaryData{ pPipelineBinaryData_ }
{}
VULKAN_HPP_CONSTEXPR_14 PipelineBinaryKeysAndDataKHR( PipelineBinaryKeysAndDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineBinaryKeysAndDataKHR( VkPipelineBinaryKeysAndDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineBinaryKeysAndDataKHR( *reinterpret_cast<PipelineBinaryKeysAndDataKHR const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineBinaryKeysAndDataKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR> const & pipelineBinaryKeys_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineBinaryDataKHR> const & pipelineBinaryData_ = {} )
: binaryCount( static_cast<uint32_t>( pipelineBinaryKeys_.size() ) ), pPipelineBinaryKeys( pipelineBinaryKeys_.data() ), pPipelineBinaryData( pipelineBinaryData_.data() )
{
#ifdef VULKAN_HPP_NO_EXCEPTIONS
VULKAN_HPP_ASSERT( pipelineBinaryKeys_.size() == pipelineBinaryData_.size() );
#else
if ( pipelineBinaryKeys_.size() != pipelineBinaryData_.size() )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING"::PipelineBinaryKeysAndDataKHR::PipelineBinaryKeysAndDataKHR: pipelineBinaryKeys_.size() != pipelineBinaryData_.size()" );
}
#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
PipelineBinaryKeysAndDataKHR & operator=( PipelineBinaryKeysAndDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineBinaryKeysAndDataKHR & operator=( VkPipelineBinaryKeysAndDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineBinaryKeysAndDataKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PipelineBinaryKeysAndDataKHR & setBinaryCount( uint32_t binaryCount_ ) VULKAN_HPP_NOEXCEPT
{
binaryCount = binaryCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineBinaryKeysAndDataKHR & setPPipelineBinaryKeys( const VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR * pPipelineBinaryKeys_ ) VULKAN_HPP_NOEXCEPT
{
pPipelineBinaryKeys = pPipelineBinaryKeys_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineBinaryKeysAndDataKHR & setPipelineBinaryKeys( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR> const & pipelineBinaryKeys_ ) VULKAN_HPP_NOEXCEPT
{
binaryCount = static_cast<uint32_t>( pipelineBinaryKeys_.size() );
pPipelineBinaryKeys = pipelineBinaryKeys_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 PipelineBinaryKeysAndDataKHR & setPPipelineBinaryData( const VULKAN_HPP_NAMESPACE::PipelineBinaryDataKHR * pPipelineBinaryData_ ) VULKAN_HPP_NOEXCEPT
{
pPipelineBinaryData = pPipelineBinaryData_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineBinaryKeysAndDataKHR & setPipelineBinaryData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineBinaryDataKHR> const & pipelineBinaryData_ ) VULKAN_HPP_NOEXCEPT
{
binaryCount = static_cast<uint32_t>( pipelineBinaryData_.size() );
pPipelineBinaryData = pipelineBinaryData_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPipelineBinaryKeysAndDataKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineBinaryKeysAndDataKHR*>( this );
}
operator VkPipelineBinaryKeysAndDataKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineBinaryKeysAndDataKHR*>( this );
}
operator VkPipelineBinaryKeysAndDataKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineBinaryKeysAndDataKHR*>( this );
}
operator VkPipelineBinaryKeysAndDataKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineBinaryKeysAndDataKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<uint32_t const &, const VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR * const &, const VULKAN_HPP_NAMESPACE::PipelineBinaryDataKHR * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( binaryCount, pPipelineBinaryKeys, pPipelineBinaryData );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PipelineBinaryKeysAndDataKHR const & ) const = default;
#else
bool operator==( PipelineBinaryKeysAndDataKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( binaryCount == rhs.binaryCount )
&& ( pPipelineBinaryKeys == rhs.pPipelineBinaryKeys )
&& ( pPipelineBinaryData == rhs.pPipelineBinaryData );
#endif
}
bool operator!=( PipelineBinaryKeysAndDataKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
uint32_t binaryCount = {};
const VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR * pPipelineBinaryKeys = {};
const VULKAN_HPP_NAMESPACE::PipelineBinaryDataKHR * pPipelineBinaryData = {};
};
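// Usage sketch (illustrative comment): the enhanced constructor above takes two parallel arrays,
// where pPipelineBinaryKeys[i] describes pPipelineBinaryData[i]; mismatched lengths assert or
// throw LogicError, as shown in the constructor body.
//
//   std::vector<vk::PipelineBinaryKeyKHR>  keys;    // e.g. restored from an application cache
//   std::vector<vk::PipelineBinaryDataKHR> datas;   // one entry per key
//   // ... fill both vectors ...
//   vk::PipelineBinaryKeysAndDataKHR keysAndData( keys, datas );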
// wrapper struct for struct VkPipelineCreateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineCreateInfoKHR.html
struct PipelineCreateInfoKHR
{
using NativeType = VkPipelineCreateInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCreateInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineCreateInfoKHR(void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }
{}
VULKAN_HPP_CONSTEXPR PipelineCreateInfoKHR( PipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineCreateInfoKHR( VkPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineCreateInfoKHR( *reinterpret_cast<PipelineCreateInfoKHR const *>( &rhs ) )
{}
PipelineCreateInfoKHR & operator=( PipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineCreateInfoKHR & operator=( VkPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCreateInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PipelineCreateInfoKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPipelineCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineCreateInfoKHR*>( this );
}
operator VkPipelineCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineCreateInfoKHR*>( this );
}
operator VkPipelineCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineCreateInfoKHR*>( this );
}
operator VkPipelineCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineCreateInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PipelineCreateInfoKHR const & ) const = default;
#else
bool operator==( PipelineCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext );
#endif
}
bool operator!=( PipelineCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCreateInfoKHR;
void * pNext = {};
};
template <>
struct CppType<StructureType, StructureType::ePipelineCreateInfoKHR>
{
using Type = PipelineCreateInfoKHR;
};
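// Usage sketch (illustrative comment): this struct only carries a pNext chain; per
// VK_KHR_pipeline_binary, the actual pipeline description (e.g. a vk::GraphicsPipelineCreateInfo)
// is attached through pNext.
//
//   vk::GraphicsPipelineCreateInfo graphicsCreateInfo;   // fully populated elsewhere
//   vk::PipelineCreateInfoKHR pipelineCreateInfo;
//   pipelineCreateInfo.setPNext( &graphicsCreateInfo );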
// wrapper struct for struct VkPipelineBinaryCreateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineBinaryCreateInfoKHR.html
struct PipelineBinaryCreateInfoKHR
{
using NativeType = VkPipelineBinaryCreateInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineBinaryCreateInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PipelineBinaryCreateInfoKHR(const VULKAN_HPP_NAMESPACE::PipelineBinaryKeysAndDataKHR * pKeysAndDataInfo_ = {}, VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, const VULKAN_HPP_NAMESPACE::PipelineCreateInfoKHR * pPipelineCreateInfo_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pKeysAndDataInfo{ pKeysAndDataInfo_ }, pipeline{ pipeline_ }, pPipelineCreateInfo{ pPipelineCreateInfo_ }
{}
VULKAN_HPP_CONSTEXPR_14 PipelineBinaryCreateInfoKHR( PipelineBinaryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineBinaryCreateInfoKHR( VkPipelineBinaryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineBinaryCreateInfoKHR( *reinterpret_cast<PipelineBinaryCreateInfoKHR const *>( &rhs ) )
{}
PipelineBinaryCreateInfoKHR & operator=( PipelineBinaryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineBinaryCreateInfoKHR & operator=( VkPipelineBinaryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PipelineBinaryCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineBinaryCreateInfoKHR & setPKeysAndDataInfo( const VULKAN_HPP_NAMESPACE::PipelineBinaryKeysAndDataKHR * pKeysAndDataInfo_ ) VULKAN_HPP_NOEXCEPT
{
pKeysAndDataInfo = pKeysAndDataInfo_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineBinaryCreateInfoKHR & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
{
pipeline = pipeline_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineBinaryCreateInfoKHR & setPPipelineCreateInfo( const VULKAN_HPP_NAMESPACE::PipelineCreateInfoKHR * pPipelineCreateInfo_ ) VULKAN_HPP_NOEXCEPT
{
pPipelineCreateInfo = pPipelineCreateInfo_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPipelineBinaryCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineBinaryCreateInfoKHR*>( this );
}
operator VkPipelineBinaryCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineBinaryCreateInfoKHR*>( this );
}
operator VkPipelineBinaryCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineBinaryCreateInfoKHR*>( this );
}
operator VkPipelineBinaryCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineBinaryCreateInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const VULKAN_HPP_NAMESPACE::PipelineBinaryKeysAndDataKHR * const &, VULKAN_HPP_NAMESPACE::Pipeline const &, const VULKAN_HPP_NAMESPACE::PipelineCreateInfoKHR * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pKeysAndDataInfo, pipeline, pPipelineCreateInfo );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PipelineBinaryCreateInfoKHR const & ) const = default;
#else
bool operator==( PipelineBinaryCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pKeysAndDataInfo == rhs.pKeysAndDataInfo )
&& ( pipeline == rhs.pipeline )
&& ( pPipelineCreateInfo == rhs.pPipelineCreateInfo );
#endif
}
bool operator!=( PipelineBinaryCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineBinaryCreateInfoKHR;
const void * pNext = {};
const VULKAN_HPP_NAMESPACE::PipelineBinaryKeysAndDataKHR * pKeysAndDataInfo = {};
VULKAN_HPP_NAMESPACE::Pipeline pipeline = {};
const VULKAN_HPP_NAMESPACE::PipelineCreateInfoKHR * pPipelineCreateInfo = {};
};
template <>
struct CppType<StructureType, StructureType::ePipelineBinaryCreateInfoKHR>
{
using Type = PipelineBinaryCreateInfoKHR;
};
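// Usage sketch (illustrative comment): per VK_KHR_pipeline_binary, exactly one of pKeysAndDataInfo,
// pipeline, or pPipelineCreateInfo identifies where the binaries come from; here they are captured
// from an existing pipeline (assumed to have been created with the capture-data pipeline flag).
//
//   vk::PipelineBinaryCreateInfoKHR binaryCreateInfo;
//   binaryCreateInfo.setPipeline( pipeline );   // pipeline is an assumed, valid vk::Pipeline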
// wrapper struct for struct VkPipelineBinaryDataInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineBinaryDataInfoKHR.html
struct PipelineBinaryDataInfoKHR
{
using NativeType = VkPipelineBinaryDataInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineBinaryDataInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineBinaryDataInfoKHR(VULKAN_HPP_NAMESPACE::PipelineBinaryKHR pipelineBinary_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pipelineBinary{ pipelineBinary_ }
{}
VULKAN_HPP_CONSTEXPR PipelineBinaryDataInfoKHR( PipelineBinaryDataInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineBinaryDataInfoKHR( VkPipelineBinaryDataInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineBinaryDataInfoKHR( *reinterpret_cast<PipelineBinaryDataInfoKHR const *>( &rhs ) )
{}
PipelineBinaryDataInfoKHR & operator=( PipelineBinaryDataInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineBinaryDataInfoKHR & operator=( VkPipelineBinaryDataInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineBinaryDataInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PipelineBinaryDataInfoKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineBinaryDataInfoKHR & setPipelineBinary( VULKAN_HPP_NAMESPACE::PipelineBinaryKHR pipelineBinary_ ) VULKAN_HPP_NOEXCEPT
{
pipelineBinary = pipelineBinary_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPipelineBinaryDataInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineBinaryDataInfoKHR*>( this );
}
operator VkPipelineBinaryDataInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineBinaryDataInfoKHR*>( this );
}
operator VkPipelineBinaryDataInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineBinaryDataInfoKHR*>( this );
}
operator VkPipelineBinaryDataInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineBinaryDataInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PipelineBinaryKHR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pipelineBinary );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PipelineBinaryDataInfoKHR const & ) const = default;
#else
bool operator==( PipelineBinaryDataInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pipelineBinary == rhs.pipelineBinary );
#endif
}
bool operator!=( PipelineBinaryDataInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineBinaryDataInfoKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::PipelineBinaryKHR pipelineBinary = {};
};
template <>
struct CppType<StructureType, StructureType::ePipelineBinaryDataInfoKHR>
{
using Type = PipelineBinaryDataInfoKHR;
};
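// Usage sketch (illustrative comment, assuming the vkGetPipelineBinaryDataKHR entry point of
// VK_KHR_pipeline_binary has been loaded): the usual two-call idiom applies - first query the
// size, then fetch the payload. The implicit pointer conversions defined above let the C++
// structs be passed directly as C parameters.
//
//   vk::PipelineBinaryDataInfoKHR dataInfo;
//   dataInfo.setPipelineBinary( pipelineBinary );   // an assumed, valid vk::PipelineBinaryKHR
//   vk::PipelineBinaryKeyKHR key;
//   size_t size = 0;
//   vkGetPipelineBinaryDataKHR( device, dataInfo, key, &size, nullptr );      // size only
//   std::vector<uint8_t> blob( size );
//   vkGetPipelineBinaryDataKHR( device, dataInfo, key, &size, blob.data() );  // payload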
// wrapper struct for struct VkPipelineBinaryHandlesInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineBinaryHandlesInfoKHR.html
struct PipelineBinaryHandlesInfoKHR
{
using NativeType = VkPipelineBinaryHandlesInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineBinaryHandlesInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineBinaryHandlesInfoKHR(uint32_t pipelineBinaryCount_ = {}, VULKAN_HPP_NAMESPACE::PipelineBinaryKHR * pPipelineBinaries_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pipelineBinaryCount{ pipelineBinaryCount_ }, pPipelineBinaries{ pPipelineBinaries_ }
{}
VULKAN_HPP_CONSTEXPR PipelineBinaryHandlesInfoKHR( PipelineBinaryHandlesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineBinaryHandlesInfoKHR( VkPipelineBinaryHandlesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineBinaryHandlesInfoKHR( *reinterpret_cast<PipelineBinaryHandlesInfoKHR const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineBinaryHandlesInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::PipelineBinaryKHR> const & pipelineBinaries_, const void * pNext_ = nullptr )
: pNext( pNext_ ), pipelineBinaryCount( static_cast<uint32_t>( pipelineBinaries_.size() ) ), pPipelineBinaries( pipelineBinaries_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
PipelineBinaryHandlesInfoKHR & operator=( PipelineBinaryHandlesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineBinaryHandlesInfoKHR & operator=( VkPipelineBinaryHandlesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineBinaryHandlesInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PipelineBinaryHandlesInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineBinaryHandlesInfoKHR & setPipelineBinaryCount( uint32_t pipelineBinaryCount_ ) VULKAN_HPP_NOEXCEPT
{
pipelineBinaryCount = pipelineBinaryCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineBinaryHandlesInfoKHR & setPPipelineBinaries( VULKAN_HPP_NAMESPACE::PipelineBinaryKHR * pPipelineBinaries_ ) VULKAN_HPP_NOEXCEPT
{
pPipelineBinaries = pPipelineBinaries_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineBinaryHandlesInfoKHR & setPipelineBinaries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::PipelineBinaryKHR> const & pipelineBinaries_ ) VULKAN_HPP_NOEXCEPT
{
pipelineBinaryCount = static_cast<uint32_t>( pipelineBinaries_.size() );
pPipelineBinaries = pipelineBinaries_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPipelineBinaryHandlesInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineBinaryHandlesInfoKHR*>( this );
}
operator VkPipelineBinaryHandlesInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineBinaryHandlesInfoKHR*>( this );
}
operator VkPipelineBinaryHandlesInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineBinaryHandlesInfoKHR*>( this );
}
operator VkPipelineBinaryHandlesInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineBinaryHandlesInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::PipelineBinaryKHR * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pipelineBinaryCount, pPipelineBinaries );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PipelineBinaryHandlesInfoKHR const & ) const = default;
#else
bool operator==( PipelineBinaryHandlesInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pipelineBinaryCount == rhs.pipelineBinaryCount )
&& ( pPipelineBinaries == rhs.pPipelineBinaries );
#endif
}
bool operator!=( PipelineBinaryHandlesInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineBinaryHandlesInfoKHR;
const void * pNext = {};
uint32_t pipelineBinaryCount = {};
VULKAN_HPP_NAMESPACE::PipelineBinaryKHR * pPipelineBinaries = {};
};
template <>
struct CppType<StructureType, StructureType::ePipelineBinaryHandlesInfoKHR>
{
using Type = PipelineBinaryHandlesInfoKHR;
};
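// Usage sketch (illustrative comment, assuming the vkCreatePipelineBinariesKHR entry point and a
// populated binaryCreateInfo as in the sketch above): this struct is the output parameter; with
// pPipelineBinaries left null, the call reports the number of binaries in pipelineBinaryCount,
// and a second call fills the handles.
//
//   vk::PipelineBinaryHandlesInfoKHR handlesInfo;
//   vkCreatePipelineBinariesKHR( device, binaryCreateInfo, nullptr, handlesInfo );   // count query
//   std::vector<vk::PipelineBinaryKHR> binaries( handlesInfo.pipelineBinaryCount );
//   handlesInfo.setPipelineBinaries( binaries );
//   vkCreatePipelineBinariesKHR( device, binaryCreateInfo, nullptr, handlesInfo );   // get handles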
// wrapper struct for struct VkPipelineBinaryInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineBinaryInfoKHR.html
struct PipelineBinaryInfoKHR
{
using NativeType = VkPipelineBinaryInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineBinaryInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineBinaryInfoKHR(uint32_t binaryCount_ = {}, const VULKAN_HPP_NAMESPACE::PipelineBinaryKHR * pPipelineBinaries_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, binaryCount{ binaryCount_ }, pPipelineBinaries{ pPipelineBinaries_ }
{}
VULKAN_HPP_CONSTEXPR PipelineBinaryInfoKHR( PipelineBinaryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineBinaryInfoKHR( VkPipelineBinaryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineBinaryInfoKHR( *reinterpret_cast<PipelineBinaryInfoKHR const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineBinaryInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineBinaryKHR> const & pipelineBinaries_, const void * pNext_ = nullptr )
: pNext( pNext_ ), binaryCount( static_cast<uint32_t>( pipelineBinaries_.size() ) ), pPipelineBinaries( pipelineBinaries_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
PipelineBinaryInfoKHR & operator=( PipelineBinaryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineBinaryInfoKHR & operator=( VkPipelineBinaryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineBinaryInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PipelineBinaryInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineBinaryInfoKHR & setBinaryCount( uint32_t binaryCount_ ) VULKAN_HPP_NOEXCEPT
{
binaryCount = binaryCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineBinaryInfoKHR & setPPipelineBinaries( const VULKAN_HPP_NAMESPACE::PipelineBinaryKHR * pPipelineBinaries_ ) VULKAN_HPP_NOEXCEPT
{
pPipelineBinaries = pPipelineBinaries_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineBinaryInfoKHR & setPipelineBinaries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineBinaryKHR> const & pipelineBinaries_ ) VULKAN_HPP_NOEXCEPT
{
binaryCount = static_cast<uint32_t>( pipelineBinaries_.size() );
pPipelineBinaries = pipelineBinaries_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPipelineBinaryInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineBinaryInfoKHR*>( this );
}
operator VkPipelineBinaryInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineBinaryInfoKHR*>( this );
}
operator VkPipelineBinaryInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineBinaryInfoKHR*>( this );
}
operator VkPipelineBinaryInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineBinaryInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::PipelineBinaryKHR * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, binaryCount, pPipelineBinaries );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PipelineBinaryInfoKHR const & ) const = default;
#else
bool operator==( PipelineBinaryInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( binaryCount == rhs.binaryCount )
&& ( pPipelineBinaries == rhs.pPipelineBinaries );
#endif
}
bool operator!=( PipelineBinaryInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineBinaryInfoKHR;
const void * pNext = {};
uint32_t binaryCount = {};
const VULKAN_HPP_NAMESPACE::PipelineBinaryKHR * pPipelineBinaries = {};
};
template <>
struct CppType<StructureType, StructureType::ePipelineBinaryInfoKHR>
{
using Type = PipelineBinaryInfoKHR;
};
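  // Usage sketch (illustrative comment only): chaining previously created pipeline binaries into a
  // pipeline create info so the driver can skip compilation (VK_KHR_pipeline_binary). Assumes the
  // default "vk" namespace, handles in a hypothetical `binaries` vector, and a hypothetical
  // `graphicsCreateInfo` of type vk::GraphicsPipelineCreateInfo:
  //   vk::PipelineBinaryInfoKHR binaryInfo{};
  //   binaryInfo.setPipelineBinaries( binaries );   // enhanced-mode setter: fills binaryCount and pPipelineBinaries
  //   graphicsCreateInfo.setPNext( &binaryInfo );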
// wrapper struct for struct VkPipelineCacheCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineCacheCreateInfo.html
struct PipelineCacheCreateInfo
{
using NativeType = VkPipelineCacheCreateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCacheCreateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineCacheCreateInfo(VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_ = {}, size_t initialDataSize_ = {}, const void * pInitialData_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, initialDataSize{ initialDataSize_ }, pInitialData{ pInitialData_ }
{}
VULKAN_HPP_CONSTEXPR PipelineCacheCreateInfo( PipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineCacheCreateInfo( VkPipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineCacheCreateInfo( *reinterpret_cast<PipelineCacheCreateInfo const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
template <typename T>
PipelineCacheCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & initialData_, const void * pNext_ = nullptr )
: pNext( pNext_ ), flags( flags_ ), initialDataSize( initialData_.size() * sizeof(T) ), pInitialData( initialData_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
PipelineCacheCreateInfo & operator=( PipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineCacheCreateInfo & operator=( VkPipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PipelineCacheCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineCacheCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineCacheCreateInfo & setInitialDataSize( size_t initialDataSize_ ) VULKAN_HPP_NOEXCEPT
{
initialDataSize = initialDataSize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineCacheCreateInfo & setPInitialData( const void * pInitialData_ ) VULKAN_HPP_NOEXCEPT
{
pInitialData = pInitialData_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
template <typename T>
PipelineCacheCreateInfo & setInitialData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & initialData_ ) VULKAN_HPP_NOEXCEPT
{
initialDataSize = initialData_.size() * sizeof(T);
pInitialData = initialData_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPipelineCacheCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineCacheCreateInfo*>( this );
}
operator VkPipelineCacheCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineCacheCreateInfo*>( this );
}
operator VkPipelineCacheCreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineCacheCreateInfo*>( this );
}
operator VkPipelineCacheCreateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineCacheCreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags const &, size_t const &, const void * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, initialDataSize, pInitialData );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PipelineCacheCreateInfo const & ) const = default;
#else
bool operator==( PipelineCacheCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( initialDataSize == rhs.initialDataSize )
&& ( pInitialData == rhs.pInitialData );
#endif
}
bool operator!=( PipelineCacheCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCacheCreateInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags = {};
size_t initialDataSize = {};
const void * pInitialData = {};
};
template <>
struct CppType<StructureType, StructureType::ePipelineCacheCreateInfo>
{
using Type = PipelineCacheCreateInfo;
};
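  // Usage sketch (illustrative comment only): creating a pipeline cache primed with a blob saved from a
  // previous run. Assumes the default "vk" namespace and a hypothetical `blob` of type
  // std::vector<uint8_t> loaded from disk:
  //   vk::PipelineCacheCreateInfo cacheInfo{};
  //   cacheInfo.setInitialData( blob );                                  // enhanced-mode setter: fills initialDataSize and pInitialData
  //   vk::PipelineCache cache = device.createPipelineCache( cacheInfo ); // exceptions-enabled call shape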
// wrapper struct for struct VkPipelineCacheHeaderVersionOne, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineCacheHeaderVersionOne.html
struct PipelineCacheHeaderVersionOne
{
using NativeType = VkPipelineCacheHeaderVersionOne;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne(uint32_t headerSize_ = {}, VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion headerVersion_ = VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion::eOne, uint32_t vendorID_ = {}, uint32_t deviceID_ = {}, std::array<uint8_t,VK_UUID_SIZE> const & pipelineCacheUUID_ = {}) VULKAN_HPP_NOEXCEPT
: headerSize{ headerSize_ }, headerVersion{ headerVersion_ }, vendorID{ vendorID_ }, deviceID{ deviceID_ }, pipelineCacheUUID{ pipelineCacheUUID_ }
{}
VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne( PipelineCacheHeaderVersionOne const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineCacheHeaderVersionOne( VkPipelineCacheHeaderVersionOne const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineCacheHeaderVersionOne( *reinterpret_cast<PipelineCacheHeaderVersionOne const *>( &rhs ) )
{}
PipelineCacheHeaderVersionOne & operator=( PipelineCacheHeaderVersionOne const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineCacheHeaderVersionOne & operator=( VkPipelineCacheHeaderVersionOne const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersionOne const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne & setHeaderSize( uint32_t headerSize_ ) VULKAN_HPP_NOEXCEPT
{
headerSize = headerSize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne & setHeaderVersion( VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion headerVersion_ ) VULKAN_HPP_NOEXCEPT
{
headerVersion = headerVersion_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne & setVendorID( uint32_t vendorID_ ) VULKAN_HPP_NOEXCEPT
{
vendorID = vendorID_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne & setDeviceID( uint32_t deviceID_ ) VULKAN_HPP_NOEXCEPT
{
deviceID = deviceID_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne & setPipelineCacheUUID( std::array<uint8_t,VK_UUID_SIZE> pipelineCacheUUID_ ) VULKAN_HPP_NOEXCEPT
{
pipelineCacheUUID = pipelineCacheUUID_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPipelineCacheHeaderVersionOne const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineCacheHeaderVersionOne*>( this );
}
operator VkPipelineCacheHeaderVersionOne &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineCacheHeaderVersionOne*>( this );
}
operator VkPipelineCacheHeaderVersionOne const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineCacheHeaderVersionOne*>( this );
}
operator VkPipelineCacheHeaderVersionOne *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineCacheHeaderVersionOne*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<uint32_t const &, VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( headerSize, headerVersion, vendorID, deviceID, pipelineCacheUUID );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PipelineCacheHeaderVersionOne const & ) const = default;
#else
bool operator==( PipelineCacheHeaderVersionOne const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( headerSize == rhs.headerSize )
&& ( headerVersion == rhs.headerVersion )
&& ( vendorID == rhs.vendorID )
&& ( deviceID == rhs.deviceID )
&& ( pipelineCacheUUID == rhs.pipelineCacheUUID );
#endif
}
bool operator!=( PipelineCacheHeaderVersionOne const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
uint32_t headerSize = {};
VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion headerVersion = VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion::eOne;
uint32_t vendorID = {};
uint32_t deviceID = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> pipelineCacheUUID = {};
};
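  // Usage sketch (illustrative comment only): validating a saved cache blob against the current device
  // before reusing it as initial data. This header occupies the first bytes of the data returned by
  // getPipelineCacheData. Assumes the default "vk" namespace, a `blob` byte vector with
  // blob.size() >= sizeof( header ), and `props` = physicalDevice.getProperties():
  //   vk::PipelineCacheHeaderVersionOne header{};
  //   std::memcpy( &header, blob.data(), sizeof( header ) );
  //   bool usable = ( header.headerVersion == vk::PipelineCacheHeaderVersion::eOne )
  //              && ( header.vendorID == props.vendorID ) && ( header.deviceID == props.deviceID )
  //              && ( header.pipelineCacheUUID == props.pipelineCacheUUID );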
// wrapper struct for struct VkPipelineColorBlendAdvancedStateCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineColorBlendAdvancedStateCreateInfoEXT.html
struct PipelineColorBlendAdvancedStateCreateInfoEXT
{
using NativeType = VkPipelineColorBlendAdvancedStateCreateInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineColorBlendAdvancedStateCreateInfoEXT(VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied_ = {}, VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ = {}, VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_ = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, srcPremultiplied{ srcPremultiplied_ }, dstPremultiplied{ dstPremultiplied_ }, blendOverlap{ blendOverlap_ }
{}
VULKAN_HPP_CONSTEXPR PipelineColorBlendAdvancedStateCreateInfoEXT( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineColorBlendAdvancedStateCreateInfoEXT( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineColorBlendAdvancedStateCreateInfoEXT( *reinterpret_cast<PipelineColorBlendAdvancedStateCreateInfoEXT const *>( &rhs ) )
{}
PipelineColorBlendAdvancedStateCreateInfoEXT & operator=( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineColorBlendAdvancedStateCreateInfoEXT & operator=( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineColorBlendAdvancedStateCreateInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAdvancedStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAdvancedStateCreateInfoEXT & setSrcPremultiplied( VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied_ ) VULKAN_HPP_NOEXCEPT
{
srcPremultiplied = srcPremultiplied_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAdvancedStateCreateInfoEXT & setDstPremultiplied( VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ ) VULKAN_HPP_NOEXCEPT
{
dstPremultiplied = dstPremultiplied_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAdvancedStateCreateInfoEXT & setBlendOverlap( VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_ ) VULKAN_HPP_NOEXCEPT
{
blendOverlap = blendOverlap_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPipelineColorBlendAdvancedStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineColorBlendAdvancedStateCreateInfoEXT*>( this );
}
operator VkPipelineColorBlendAdvancedStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineColorBlendAdvancedStateCreateInfoEXT*>( this );
}
operator VkPipelineColorBlendAdvancedStateCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineColorBlendAdvancedStateCreateInfoEXT*>( this );
}
operator VkPipelineColorBlendAdvancedStateCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineColorBlendAdvancedStateCreateInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::BlendOverlapEXT const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, srcPremultiplied, dstPremultiplied, blendOverlap );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PipelineColorBlendAdvancedStateCreateInfoEXT const & ) const = default;
#else
bool operator==( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( srcPremultiplied == rhs.srcPremultiplied )
&& ( dstPremultiplied == rhs.dstPremultiplied )
&& ( blendOverlap == rhs.blendOverlap );
#endif
}
bool operator!=( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied = {};
VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied = {};
VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated;
};
template <>
struct CppType<StructureType, StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT>
{
using Type = PipelineColorBlendAdvancedStateCreateInfoEXT;
};
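  // Usage sketch (illustrative comment only): enabling an advanced blend overlap mode
  // (VK_EXT_blend_operation_advanced) by chaining into the color blend state. Assumes the default
  // "vk" namespace and a hypothetical `colorBlendState` of type vk::PipelineColorBlendStateCreateInfo:
  //   vk::PipelineColorBlendAdvancedStateCreateInfoEXT advanced{};
  //   advanced.setSrcPremultiplied( VK_TRUE ).setDstPremultiplied( VK_TRUE )
  //           .setBlendOverlap( vk::BlendOverlapEXT::eDisjoint );
  //   colorBlendState.setPNext( &advanced );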
// wrapper struct for struct VkPipelineColorWriteCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineColorWriteCreateInfoEXT.html
struct PipelineColorWriteCreateInfoEXT
{
using NativeType = VkPipelineColorWriteCreateInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineColorWriteCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineColorWriteCreateInfoEXT(uint32_t attachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, attachmentCount{ attachmentCount_ }, pColorWriteEnables{ pColorWriteEnables_ }
{}
VULKAN_HPP_CONSTEXPR PipelineColorWriteCreateInfoEXT( PipelineColorWriteCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineColorWriteCreateInfoEXT( VkPipelineColorWriteCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineColorWriteCreateInfoEXT( *reinterpret_cast<PipelineColorWriteCreateInfoEXT const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineColorWriteCreateInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Bool32> const & colorWriteEnables_, const void * pNext_ = nullptr )
: pNext( pNext_ ), attachmentCount( static_cast<uint32_t>( colorWriteEnables_.size() ) ), pColorWriteEnables( colorWriteEnables_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
PipelineColorWriteCreateInfoEXT & operator=( PipelineColorWriteCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineColorWriteCreateInfoEXT & operator=( VkPipelineColorWriteCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineColorWriteCreateInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PipelineColorWriteCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineColorWriteCreateInfoEXT & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
{
attachmentCount = attachmentCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineColorWriteCreateInfoEXT & setPColorWriteEnables( const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables_ ) VULKAN_HPP_NOEXCEPT
{
pColorWriteEnables = pColorWriteEnables_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineColorWriteCreateInfoEXT & setColorWriteEnables( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Bool32> const & colorWriteEnables_ ) VULKAN_HPP_NOEXCEPT
{
attachmentCount = static_cast<uint32_t>( colorWriteEnables_.size() );
pColorWriteEnables = colorWriteEnables_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPipelineColorWriteCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineColorWriteCreateInfoEXT*>( this );
}
operator VkPipelineColorWriteCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineColorWriteCreateInfoEXT*>( this );
}
operator VkPipelineColorWriteCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineColorWriteCreateInfoEXT*>( this );
}
operator VkPipelineColorWriteCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineColorWriteCreateInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Bool32 * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, attachmentCount, pColorWriteEnables );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PipelineColorWriteCreateInfoEXT const & ) const = default;
#else
bool operator==( PipelineColorWriteCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( attachmentCount == rhs.attachmentCount )
&& ( pColorWriteEnables == rhs.pColorWriteEnables );
#endif
}
bool operator!=( PipelineColorWriteCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineColorWriteCreateInfoEXT;
const void * pNext = {};
uint32_t attachmentCount = {};
const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables = {};
};
template <>
struct CppType<StructureType, StructureType::ePipelineColorWriteCreateInfoEXT>
{
using Type = PipelineColorWriteCreateInfoEXT;
};
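  // Usage sketch (illustrative comment only): statically disabling writes to one of two color
  // attachments (VK_EXT_color_write_enable). attachmentCount must match the attachmentCount of the
  // vk::PipelineColorBlendStateCreateInfo this struct extends. Assumes the default "vk" namespace and
  // a hypothetical `colorBlendState` variable:
  //   std::array<vk::Bool32, 2> writeEnables = { VK_TRUE, VK_FALSE };
  //   vk::PipelineColorWriteCreateInfoEXT colorWrite{};
  //   colorWrite.setColorWriteEnables( writeEnables );   // enhanced-mode setter: fills attachmentCount and pColorWriteEnables
  //   colorBlendState.setPNext( &colorWrite );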
// wrapper struct for struct VkPipelineCompilerControlCreateInfoAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineCompilerControlCreateInfoAMD.html
struct PipelineCompilerControlCreateInfoAMD
{
using NativeType = VkPipelineCompilerControlCreateInfoAMD;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCompilerControlCreateInfoAMD;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineCompilerControlCreateInfoAMD(VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, compilerControlFlags{ compilerControlFlags_ }
{}
VULKAN_HPP_CONSTEXPR PipelineCompilerControlCreateInfoAMD( PipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineCompilerControlCreateInfoAMD( VkPipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineCompilerControlCreateInfoAMD( *reinterpret_cast<PipelineCompilerControlCreateInfoAMD const *>( &rhs ) )
{}
PipelineCompilerControlCreateInfoAMD & operator=( PipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineCompilerControlCreateInfoAMD & operator=( VkPipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCompilerControlCreateInfoAMD const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PipelineCompilerControlCreateInfoAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineCompilerControlCreateInfoAMD & setCompilerControlFlags( VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags_ ) VULKAN_HPP_NOEXCEPT
{
compilerControlFlags = compilerControlFlags_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPipelineCompilerControlCreateInfoAMD const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineCompilerControlCreateInfoAMD*>( this );
}
operator VkPipelineCompilerControlCreateInfoAMD &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineCompilerControlCreateInfoAMD*>( this );
}
operator VkPipelineCompilerControlCreateInfoAMD const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineCompilerControlCreateInfoAMD*>( this );
}
operator VkPipelineCompilerControlCreateInfoAMD *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineCompilerControlCreateInfoAMD*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, compilerControlFlags );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PipelineCompilerControlCreateInfoAMD const & ) const = default;
#else
bool operator==( PipelineCompilerControlCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( compilerControlFlags == rhs.compilerControlFlags );
#endif
}
bool operator!=( PipelineCompilerControlCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCompilerControlCreateInfoAMD;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags = {};
};
template <>
struct CppType<StructureType, StructureType::ePipelineCompilerControlCreateInfoAMD>
{
using Type = PipelineCompilerControlCreateInfoAMD;
};
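  // Usage sketch (illustrative comment only): chaining AMD compiler controls into a pipeline create
  // info (VK_AMD_pipeline_compiler_control). The extension currently defines no flag bits, so an
  // empty mask is typical. Assumes the default "vk" namespace and a hypothetical `computeCreateInfo`
  // of type vk::ComputePipelineCreateInfo:
  //   vk::PipelineCompilerControlCreateInfoAMD compilerControl{};   // compilerControlFlags == {}
  //   computeCreateInfo.setPNext( &compilerControl );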
// wrapper struct for struct VkPipelineCoverageModulationStateCreateInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineCoverageModulationStateCreateInfoNV.html
struct PipelineCoverageModulationStateCreateInfoNV
{
using NativeType = VkPipelineCoverageModulationStateCreateInfoNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCoverageModulationStateCreateInfoNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineCoverageModulationStateCreateInfoNV(VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_ = {}, VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_ = VULKAN_HPP_NAMESPACE::CoverageModulationModeNV::eNone, VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_ = {}, uint32_t coverageModulationTableCount_ = {}, const float * pCoverageModulationTable_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, coverageModulationMode{ coverageModulationMode_ }, coverageModulationTableEnable{ coverageModulationTableEnable_ }, coverageModulationTableCount{ coverageModulationTableCount_ }, pCoverageModulationTable{ pCoverageModulationTable_ }
{}
VULKAN_HPP_CONSTEXPR PipelineCoverageModulationStateCreateInfoNV( PipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineCoverageModulationStateCreateInfoNV( VkPipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineCoverageModulationStateCreateInfoNV( *reinterpret_cast<PipelineCoverageModulationStateCreateInfoNV const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineCoverageModulationStateCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_, VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_, VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const float> const & coverageModulationTable_, const void * pNext_ = nullptr )
: pNext( pNext_ ), flags( flags_ ), coverageModulationMode( coverageModulationMode_ ), coverageModulationTableEnable( coverageModulationTableEnable_ ), coverageModulationTableCount( static_cast<uint32_t>( coverageModulationTable_.size() ) ), pCoverageModulationTable( coverageModulationTable_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
PipelineCoverageModulationStateCreateInfoNV & operator=( PipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineCoverageModulationStateCreateInfoNV & operator=( VkPipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV & setCoverageModulationMode( VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_ ) VULKAN_HPP_NOEXCEPT
{
coverageModulationMode = coverageModulationMode_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV & setCoverageModulationTableEnable( VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_ ) VULKAN_HPP_NOEXCEPT
{
coverageModulationTableEnable = coverageModulationTableEnable_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV & setCoverageModulationTableCount( uint32_t coverageModulationTableCount_ ) VULKAN_HPP_NOEXCEPT
{
coverageModulationTableCount = coverageModulationTableCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV & setPCoverageModulationTable( const float * pCoverageModulationTable_ ) VULKAN_HPP_NOEXCEPT
{
pCoverageModulationTable = pCoverageModulationTable_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineCoverageModulationStateCreateInfoNV & setCoverageModulationTable( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const float> const & coverageModulationTable_ ) VULKAN_HPP_NOEXCEPT
{
coverageModulationTableCount = static_cast<uint32_t>( coverageModulationTable_.size() );
pCoverageModulationTable = coverageModulationTable_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPipelineCoverageModulationStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineCoverageModulationStateCreateInfoNV*>( this );
}
operator VkPipelineCoverageModulationStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineCoverageModulationStateCreateInfoNV*>( this );
}
operator VkPipelineCoverageModulationStateCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineCoverageModulationStateCreateInfoNV*>( this );
}
operator VkPipelineCoverageModulationStateCreateInfoNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineCoverageModulationStateCreateInfoNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV const &, VULKAN_HPP_NAMESPACE::CoverageModulationModeNV const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint32_t const &, const float * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, coverageModulationMode, coverageModulationTableEnable, coverageModulationTableCount, pCoverageModulationTable );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PipelineCoverageModulationStateCreateInfoNV const & ) const = default;
#else
bool operator==( PipelineCoverageModulationStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( coverageModulationMode == rhs.coverageModulationMode )
&& ( coverageModulationTableEnable == rhs.coverageModulationTableEnable )
&& ( coverageModulationTableCount == rhs.coverageModulationTableCount )
&& ( pCoverageModulationTable == rhs.pCoverageModulationTable );
#endif
}
bool operator!=( PipelineCoverageModulationStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageModulationStateCreateInfoNV;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags = {};
VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode = VULKAN_HPP_NAMESPACE::CoverageModulationModeNV::eNone;
VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable = {};
uint32_t coverageModulationTableCount = {};
const float * pCoverageModulationTable = {};
};
template <>
struct CppType<StructureType, StructureType::ePipelineCoverageModulationStateCreateInfoNV>
{
using Type = PipelineCoverageModulationStateCreateInfoNV;
};
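  // Usage sketch (illustrative comment only): modulating color by coverage when rendering with mixed
  // sample counts (VK_NV_framebuffer_mixed_samples); chains into vk::PipelineMultisampleStateCreateInfo.
  // Assumes the default "vk" namespace and a hypothetical `multisampleState` variable:
  //   vk::PipelineCoverageModulationStateCreateInfoNV coverageModulation{};
  //   coverageModulation.setCoverageModulationMode( vk::CoverageModulationModeNV::eRgba );
  //   multisampleState.setPNext( &coverageModulation );   // table disabled, so no custom modulation table is supplied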
// wrapper struct for struct VkPipelineCoverageReductionStateCreateInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineCoverageReductionStateCreateInfoNV.html
struct PipelineCoverageReductionStateCreateInfoNV
{
using NativeType = VkPipelineCoverageReductionStateCreateInfoNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCoverageReductionStateCreateInfoNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineCoverageReductionStateCreateInfoNV(VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags_ = {}, VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, coverageReductionMode{ coverageReductionMode_ }
{}
VULKAN_HPP_CONSTEXPR PipelineCoverageReductionStateCreateInfoNV( PipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineCoverageReductionStateCreateInfoNV( VkPipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineCoverageReductionStateCreateInfoNV( *reinterpret_cast<PipelineCoverageReductionStateCreateInfoNV const *>( &rhs ) )
{}
PipelineCoverageReductionStateCreateInfoNV & operator=( PipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineCoverageReductionStateCreateInfoNV & operator=( VkPipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PipelineCoverageReductionStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineCoverageReductionStateCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineCoverageReductionStateCreateInfoNV & setCoverageReductionMode( VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ ) VULKAN_HPP_NOEXCEPT
{
coverageReductionMode = coverageReductionMode_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPipelineCoverageReductionStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineCoverageReductionStateCreateInfoNV*>( this );
}
operator VkPipelineCoverageReductionStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineCoverageReductionStateCreateInfoNV*>( this );
}
operator VkPipelineCoverageReductionStateCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineCoverageReductionStateCreateInfoNV*>( this );
}
operator VkPipelineCoverageReductionStateCreateInfoNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineCoverageReductionStateCreateInfoNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV const &, VULKAN_HPP_NAMESPACE::CoverageReductionModeNV const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, coverageReductionMode );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PipelineCoverageReductionStateCreateInfoNV const & ) const = default;
#else
bool operator==( PipelineCoverageReductionStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( coverageReductionMode == rhs.coverageReductionMode );
#endif
}
bool operator!=( PipelineCoverageReductionStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageReductionStateCreateInfoNV;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags = {};
VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge;
};
template <>
struct CppType<StructureType, StructureType::ePipelineCoverageReductionStateCreateInfoNV>
{
using Type = PipelineCoverageReductionStateCreateInfoNV;
};
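  // Usage sketch (illustrative comment only): selecting a coverage reduction mode
  // (VK_NV_coverage_reduction_mode) via the multisample state's pNext chain. Assumes the default
  // "vk" namespace and a hypothetical `multisampleState` variable:
  //   vk::PipelineCoverageReductionStateCreateInfoNV coverageReduction{};
  //   coverageReduction.setCoverageReductionMode( vk::CoverageReductionModeNV::eTruncate );
  //   multisampleState.setPNext( &coverageReduction );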
// wrapper struct for struct VkPipelineCoverageToColorStateCreateInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineCoverageToColorStateCreateInfoNV.html
struct PipelineCoverageToColorStateCreateInfoNV
{
using NativeType = VkPipelineCoverageToColorStateCreateInfoNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCoverageToColorStateCreateInfoNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineCoverageToColorStateCreateInfoNV(VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags_ = {}, VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable_ = {}, uint32_t coverageToColorLocation_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, coverageToColorEnable{ coverageToColorEnable_ }, coverageToColorLocation{ coverageToColorLocation_ }
{}
VULKAN_HPP_CONSTEXPR PipelineCoverageToColorStateCreateInfoNV( PipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineCoverageToColorStateCreateInfoNV( VkPipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineCoverageToColorStateCreateInfoNV( *reinterpret_cast<PipelineCoverageToColorStateCreateInfoNV const *>( &rhs ) )
{}
PipelineCoverageToColorStateCreateInfoNV & operator=( PipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineCoverageToColorStateCreateInfoNV & operator=( VkPipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PipelineCoverageToColorStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineCoverageToColorStateCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineCoverageToColorStateCreateInfoNV & setCoverageToColorEnable( VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable_ ) VULKAN_HPP_NOEXCEPT
{
coverageToColorEnable = coverageToColorEnable_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineCoverageToColorStateCreateInfoNV & setCoverageToColorLocation( uint32_t coverageToColorLocation_ ) VULKAN_HPP_NOEXCEPT
{
coverageToColorLocation = coverageToColorLocation_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPipelineCoverageToColorStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineCoverageToColorStateCreateInfoNV*>( this );
}
operator VkPipelineCoverageToColorStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineCoverageToColorStateCreateInfoNV*>( this );
}
operator VkPipelineCoverageToColorStateCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineCoverageToColorStateCreateInfoNV*>( this );
}
operator VkPipelineCoverageToColorStateCreateInfoNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineCoverageToColorStateCreateInfoNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, coverageToColorEnable, coverageToColorLocation );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PipelineCoverageToColorStateCreateInfoNV const & ) const = default;
#else
bool operator==( PipelineCoverageToColorStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( coverageToColorEnable == rhs.coverageToColorEnable )
&& ( coverageToColorLocation == rhs.coverageToColorLocation );
#endif
}
bool operator!=( PipelineCoverageToColorStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageToColorStateCreateInfoNV;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags = {};
VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable = {};
uint32_t coverageToColorLocation = {};
};
template <>
struct CppType<StructureType, StructureType::ePipelineCoverageToColorStateCreateInfoNV>
{
using Type = PipelineCoverageToColorStateCreateInfoNV;
};
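  // Usage sketch (illustrative comment only): writing fragment coverage into a color attachment
  // (VK_NV_fragment_coverage_to_color); chains into vk::PipelineMultisampleStateCreateInfo. Assumes
  // the default "vk" namespace and a hypothetical `multisampleState` variable:
  //   vk::PipelineCoverageToColorStateCreateInfoNV coverageToColor{};
  //   coverageToColor.setCoverageToColorEnable( VK_TRUE ).setCoverageToColorLocation( 0 );
  //   multisampleState.setPNext( &coverageToColor );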
// wrapper struct for struct VkPipelineCreateFlags2CreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineCreateFlags2CreateInfo.html
struct PipelineCreateFlags2CreateInfo
{
using NativeType = VkPipelineCreateFlags2CreateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCreateFlags2CreateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineCreateFlags2CreateInfo(VULKAN_HPP_NAMESPACE::PipelineCreateFlags2 flags_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }
{}
VULKAN_HPP_CONSTEXPR PipelineCreateFlags2CreateInfo( PipelineCreateFlags2CreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineCreateFlags2CreateInfo( VkPipelineCreateFlags2CreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineCreateFlags2CreateInfo( *reinterpret_cast<PipelineCreateFlags2CreateInfo const *>( &rhs ) )
{}
PipelineCreateFlags2CreateInfo & operator=( PipelineCreateFlags2CreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineCreateFlags2CreateInfo & operator=( VkPipelineCreateFlags2CreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCreateFlags2CreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PipelineCreateFlags2CreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineCreateFlags2CreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags2 flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPipelineCreateFlags2CreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineCreateFlags2CreateInfo*>( this );
}
operator VkPipelineCreateFlags2CreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineCreateFlags2CreateInfo*>( this );
}
operator VkPipelineCreateFlags2CreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineCreateFlags2CreateInfo*>( this );
}
operator VkPipelineCreateFlags2CreateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineCreateFlags2CreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineCreateFlags2 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PipelineCreateFlags2CreateInfo const & ) const = default;
#else
bool operator==( PipelineCreateFlags2CreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags );
#endif
}
bool operator!=( PipelineCreateFlags2CreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCreateFlags2CreateInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::PipelineCreateFlags2 flags = {};
};
template <>
struct CppType<StructureType, StructureType::ePipelineCreateFlags2CreateInfo>
{
using Type = PipelineCreateFlags2CreateInfo;
};
using PipelineCreateFlags2CreateInfoKHR = PipelineCreateFlags2CreateInfo;
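  // Usage sketch (illustrative comment only): supplying 64-bit pipeline create flags; when this struct
  // is chained, its flags replace the legacy 32-bit flags of the pipeline create info. Assumes the
  // default "vk" namespace and a hypothetical `graphicsCreateInfo` variable:
  //   vk::PipelineCreateFlags2CreateInfo flags2{};
  //   flags2.setFlags( vk::PipelineCreateFlagBits2::eDisableOptimization );
  //   graphicsCreateInfo.setPNext( &flags2 );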
// wrapper struct for struct VkPipelineCreationFeedback, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineCreationFeedback.html
struct PipelineCreationFeedback
{
using NativeType = VkPipelineCreationFeedback;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineCreationFeedback(VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlags flags_ = {}, uint64_t duration_ = {}) VULKAN_HPP_NOEXCEPT
: flags{ flags_ }, duration{ duration_ }
{}
VULKAN_HPP_CONSTEXPR PipelineCreationFeedback( PipelineCreationFeedback const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineCreationFeedback( VkPipelineCreationFeedback const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineCreationFeedback( *reinterpret_cast<PipelineCreationFeedback const *>( &rhs ) )
{}
PipelineCreationFeedback & operator=( PipelineCreationFeedback const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineCreationFeedback & operator=( VkPipelineCreationFeedback const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCreationFeedback const *>( &rhs );
return *this;
}
operator VkPipelineCreationFeedback const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineCreationFeedback*>( this );
}
operator VkPipelineCreationFeedback &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineCreationFeedback*>( this );
}
operator VkPipelineCreationFeedback const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineCreationFeedback*>( this );
}
operator VkPipelineCreationFeedback *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineCreationFeedback*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlags const &, uint64_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( flags, duration );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PipelineCreationFeedback const & ) const = default;
#else
bool operator==( PipelineCreationFeedback const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( flags == rhs.flags )
&& ( duration == rhs.duration );
#endif
}
bool operator!=( PipelineCreationFeedback const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlags flags = {};
uint64_t duration = {};
};
using PipelineCreationFeedbackEXT = PipelineCreationFeedback;
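  // Usage sketch (illustrative comment only): interpreting a feedback entry after pipeline creation.
  // The duration is in nanoseconds and is only meaningful when the eValid flag is set. Assumes the
  // default "vk" namespace and a hypothetical `feedback` of type vk::PipelineCreationFeedback:
  //   if ( feedback.flags & vk::PipelineCreationFeedbackFlagBits::eValid )
  //     std::cout << "pipeline compiled in " << feedback.duration << " ns\n";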
// wrapper struct for struct VkPipelineCreationFeedbackCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineCreationFeedbackCreateInfo.html
struct PipelineCreationFeedbackCreateInfo
{
using NativeType = VkPipelineCreationFeedbackCreateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCreationFeedbackCreateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackCreateInfo(VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineCreationFeedback_ = {}, uint32_t pipelineStageCreationFeedbackCount_ = {}, VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineStageCreationFeedbacks_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pPipelineCreationFeedback{ pPipelineCreationFeedback_ }, pipelineStageCreationFeedbackCount{ pipelineStageCreationFeedbackCount_ }, pPipelineStageCreationFeedbacks{ pPipelineStageCreationFeedbacks_ }
{}
VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackCreateInfo( PipelineCreationFeedbackCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineCreationFeedbackCreateInfo( VkPipelineCreationFeedbackCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineCreationFeedbackCreateInfo( *reinterpret_cast<PipelineCreationFeedbackCreateInfo const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineCreationFeedbackCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineCreationFeedback_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::PipelineCreationFeedback> const & pipelineStageCreationFeedbacks_, const void * pNext_ = nullptr )
: pNext( pNext_ ), pPipelineCreationFeedback( pPipelineCreationFeedback_ ), pipelineStageCreationFeedbackCount( static_cast<uint32_t>( pipelineStageCreationFeedbacks_.size() ) ), pPipelineStageCreationFeedbacks( pipelineStageCreationFeedbacks_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
PipelineCreationFeedbackCreateInfo & operator=( PipelineCreationFeedbackCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineCreationFeedbackCreateInfo & operator=( VkPipelineCreationFeedbackCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PipelineCreationFeedbackCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineCreationFeedbackCreateInfo & setPPipelineCreationFeedback( VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineCreationFeedback_ ) VULKAN_HPP_NOEXCEPT
{
pPipelineCreationFeedback = pPipelineCreationFeedback_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineCreationFeedbackCreateInfo & setPipelineStageCreationFeedbackCount( uint32_t pipelineStageCreationFeedbackCount_ ) VULKAN_HPP_NOEXCEPT
{
pipelineStageCreationFeedbackCount = pipelineStageCreationFeedbackCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineCreationFeedbackCreateInfo & setPPipelineStageCreationFeedbacks( VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineStageCreationFeedbacks_ ) VULKAN_HPP_NOEXCEPT
{
pPipelineStageCreationFeedbacks = pPipelineStageCreationFeedbacks_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineCreationFeedbackCreateInfo & setPipelineStageCreationFeedbacks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::PipelineCreationFeedback> const & pipelineStageCreationFeedbacks_ ) VULKAN_HPP_NOEXCEPT
{
pipelineStageCreationFeedbackCount = static_cast<uint32_t>( pipelineStageCreationFeedbacks_.size() );
pPipelineStageCreationFeedbacks = pipelineStageCreationFeedbacks_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPipelineCreationFeedbackCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineCreationFeedbackCreateInfo*>( this );
}
operator VkPipelineCreationFeedbackCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineCreationFeedbackCreateInfo*>( this );
}
operator VkPipelineCreationFeedbackCreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineCreationFeedbackCreateInfo*>( this );
}
operator VkPipelineCreationFeedbackCreateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineCreationFeedbackCreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pPipelineCreationFeedback, pipelineStageCreationFeedbackCount, pPipelineStageCreationFeedbacks );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PipelineCreationFeedbackCreateInfo const & ) const = default;
#else
bool operator==( PipelineCreationFeedbackCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pPipelineCreationFeedback == rhs.pPipelineCreationFeedback )
&& ( pipelineStageCreationFeedbackCount == rhs.pipelineStageCreationFeedbackCount )
&& ( pPipelineStageCreationFeedbacks == rhs.pPipelineStageCreationFeedbacks );
#endif
}
bool operator!=( PipelineCreationFeedbackCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCreationFeedbackCreateInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineCreationFeedback = {};
uint32_t pipelineStageCreationFeedbackCount = {};
VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineStageCreationFeedbacks = {};
};
template <>
struct CppType<StructureType, StructureType::ePipelineCreationFeedbackCreateInfo>
{
using Type = PipelineCreationFeedbackCreateInfo;
};
using PipelineCreationFeedbackCreateInfoEXT = PipelineCreationFeedbackCreateInfo;
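  // Usage sketch (illustrative comment only): requesting creation feedback for a pipeline and each of
  // its shader stages. Assumes the default "vk" namespace and a hypothetical `graphicsCreateInfo`
  // whose stageCount is already set:
  //   vk::PipelineCreationFeedback pipelineFeedback{};
  //   std::vector<vk::PipelineCreationFeedback> stageFeedbacks( graphicsCreateInfo.stageCount );
  //   vk::PipelineCreationFeedbackCreateInfo feedbackInfo( &pipelineFeedback, stageFeedbacks );
  //   graphicsCreateInfo.setPNext( &feedbackInfo );   // the driver fills these structs at create time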
// wrapper struct for struct VkPipelineDiscardRectangleStateCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineDiscardRectangleStateCreateInfoEXT.html
struct PipelineDiscardRectangleStateCreateInfoEXT
{
using NativeType = VkPipelineDiscardRectangleStateCreateInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineDiscardRectangleStateCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineDiscardRectangleStateCreateInfoEXT(VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_ = {}, VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_ = VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT::eInclusive, uint32_t discardRectangleCount_ = {}, const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, discardRectangleMode{ discardRectangleMode_ }, discardRectangleCount{ discardRectangleCount_ }, pDiscardRectangles{ pDiscardRectangles_ }
{}
VULKAN_HPP_CONSTEXPR PipelineDiscardRectangleStateCreateInfoEXT( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineDiscardRectangleStateCreateInfoEXT( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineDiscardRectangleStateCreateInfoEXT( *reinterpret_cast<PipelineDiscardRectangleStateCreateInfoEXT const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineDiscardRectangleStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_, VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles_, const void * pNext_ = nullptr )
: pNext( pNext_ ), flags( flags_ ), discardRectangleMode( discardRectangleMode_ ), discardRectangleCount( static_cast<uint32_t>( discardRectangles_.size() ) ), pDiscardRectangles( discardRectangles_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
PipelineDiscardRectangleStateCreateInfoEXT & operator=( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineDiscardRectangleStateCreateInfoEXT & operator=( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT & setDiscardRectangleMode( VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_ ) VULKAN_HPP_NOEXCEPT
{
discardRectangleMode = discardRectangleMode_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT & setDiscardRectangleCount( uint32_t discardRectangleCount_ ) VULKAN_HPP_NOEXCEPT
{
discardRectangleCount = discardRectangleCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT & setPDiscardRectangles( const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles_ ) VULKAN_HPP_NOEXCEPT
{
pDiscardRectangles = pDiscardRectangles_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineDiscardRectangleStateCreateInfoEXT & setDiscardRectangles( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles_ ) VULKAN_HPP_NOEXCEPT
{
discardRectangleCount = static_cast<uint32_t>( discardRectangles_.size() );
pDiscardRectangles = discardRectangles_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPipelineDiscardRectangleStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineDiscardRectangleStateCreateInfoEXT*>( this );
}
operator VkPipelineDiscardRectangleStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineDiscardRectangleStateCreateInfoEXT*>( this );
}
operator VkPipelineDiscardRectangleStateCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineDiscardRectangleStateCreateInfoEXT*>( this );
}
operator VkPipelineDiscardRectangleStateCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineDiscardRectangleStateCreateInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT const &, VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Rect2D * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, discardRectangleMode, discardRectangleCount, pDiscardRectangles );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PipelineDiscardRectangleStateCreateInfoEXT const & ) const = default;
#else
bool operator==( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( discardRectangleMode == rhs.discardRectangleMode )
&& ( discardRectangleCount == rhs.discardRectangleCount )
&& ( pDiscardRectangles == rhs.pDiscardRectangles );
#endif
}
bool operator!=( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDiscardRectangleStateCreateInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags = {};
VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode = VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT::eInclusive;
uint32_t discardRectangleCount = {};
const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles = {};
};
template <>
struct CppType<StructureType, StructureType::ePipelineDiscardRectangleStateCreateInfoEXT>
{
using Type = PipelineDiscardRectangleStateCreateInfoEXT;
};
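// Usage sketch (editorial comment, assuming VULKAN_HPP_DISABLE_ENHANCED_MODE is not defined):
// the ArrayProxy constructor above derives discardRectangleCount and pDiscardRectangles from a
// single contiguous container. ArrayProxyNoTemporaries rejects rvalues on purpose, because the
// struct only stores a pointer into the container.
//
//   std::array<vk::Rect2D, 2> rects = { vk::Rect2D{ { 0, 0 }, { 256, 256 } },
//                                       vk::Rect2D{ { 256, 0 }, { 256, 256 } } };
//   vk::PipelineDiscardRectangleStateCreateInfoEXT discardState{
//     {}, vk::DiscardRectangleModeEXT::eExclusive, rects };
//   // chain discardState into VkGraphicsPipelineCreateInfo::pNext; rects must outlive its use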
// wrapper struct for struct VkPipelineExecutableInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineExecutableInfoKHR.html
struct PipelineExecutableInfoKHR
{
using NativeType = VkPipelineExecutableInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutableInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineExecutableInfoKHR(VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, uint32_t executableIndex_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pipeline{ pipeline_ }, executableIndex{ executableIndex_ }
{}
VULKAN_HPP_CONSTEXPR PipelineExecutableInfoKHR( PipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineExecutableInfoKHR( VkPipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineExecutableInfoKHR( *reinterpret_cast<PipelineExecutableInfoKHR const *>( &rhs ) )
{}
PipelineExecutableInfoKHR & operator=( PipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineExecutableInfoKHR & operator=( VkPipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInfoKHR & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
{
pipeline = pipeline_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInfoKHR & setExecutableIndex( uint32_t executableIndex_ ) VULKAN_HPP_NOEXCEPT
{
executableIndex = executableIndex_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPipelineExecutableInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineExecutableInfoKHR*>( this );
}
operator VkPipelineExecutableInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineExecutableInfoKHR*>( this );
}
operator VkPipelineExecutableInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineExecutableInfoKHR*>( this );
}
operator VkPipelineExecutableInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineExecutableInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Pipeline const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pipeline, executableIndex );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PipelineExecutableInfoKHR const & ) const = default;
#else
bool operator==( PipelineExecutableInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pipeline == rhs.pipeline )
&& ( executableIndex == rhs.executableIndex );
#endif
}
bool operator!=( PipelineExecutableInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Pipeline pipeline = {};
uint32_t executableIndex = {};
};
template <>
struct CppType<StructureType, StructureType::ePipelineExecutableInfoKHR>
{
using Type = PipelineExecutableInfoKHR;
};
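// Usage sketch (editorial comment): PipelineExecutableInfoKHR names one executable of a pipeline
// (by index) for the statistics / internal-representation queries of
// VK_KHR_pipeline_executable_properties. The pipeline is assumed to have been created with
// vk::PipelineCreateFlagBits::eCaptureStatisticsKHR.
//
//   vk::PipelineExecutableInfoKHR execInfo{};
//   execInfo.setPipeline( pipeline ).setExecutableIndex( 0 );  // first executable
//   auto stats = device.getPipelineExecutableStatisticsKHR( execInfo );  // enhanced-mode helper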
// wrapper struct for struct VkPipelineExecutableInternalRepresentationKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineExecutableInternalRepresentationKHR.html
struct PipelineExecutableInternalRepresentationKHR
{
using NativeType = VkPipelineExecutableInternalRepresentationKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutableInternalRepresentationKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInternalRepresentationKHR(std::array<char,VK_MAX_DESCRIPTION_SIZE> const & name_ = {}, std::array<char,VK_MAX_DESCRIPTION_SIZE> const & description_ = {}, VULKAN_HPP_NAMESPACE::Bool32 isText_ = {}, size_t dataSize_ = {}, void * pData_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, name{ name_ }, description{ description_ }, isText{ isText_ }, dataSize{ dataSize_ }, pData{ pData_ }
{}
VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInternalRepresentationKHR( PipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineExecutableInternalRepresentationKHR( VkPipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineExecutableInternalRepresentationKHR( *reinterpret_cast<PipelineExecutableInternalRepresentationKHR const *>( &rhs ) )
{}
PipelineExecutableInternalRepresentationKHR & operator=( PipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineExecutableInternalRepresentationKHR & operator=( VkPipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR const *>( &rhs );
return *this;
}
operator VkPipelineExecutableInternalRepresentationKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineExecutableInternalRepresentationKHR*>( this );
}
operator VkPipelineExecutableInternalRepresentationKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR*>( this );
}
operator VkPipelineExecutableInternalRepresentationKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineExecutableInternalRepresentationKHR*>( this );
}
operator VkPipelineExecutableInternalRepresentationKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &, VULKAN_HPP_NAMESPACE::Bool32 const &, size_t const &, void * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, name, description, isText, dataSize, pData );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
std::strong_ordering operator<=>( PipelineExecutableInternalRepresentationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
if ( auto cmp = strcmp( name, rhs.name ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
if ( auto cmp = strcmp( description, rhs.description ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
if ( auto cmp = isText <=> rhs.isText; cmp != 0 ) return cmp;
if ( auto cmp = dataSize <=> rhs.dataSize; cmp != 0 ) return cmp;
if ( auto cmp = pData <=> rhs.pData; cmp != 0 ) return cmp;
return std::strong_ordering::equivalent;
}
#endif
bool operator==( PipelineExecutableInternalRepresentationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( strcmp( name, rhs.name ) == 0 )
&& ( strcmp( description, rhs.description ) == 0 )
&& ( isText == rhs.isText )
&& ( dataSize == rhs.dataSize )
&& ( pData == rhs.pData );
}
bool operator!=( PipelineExecutableInternalRepresentationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableInternalRepresentationKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> name = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
VULKAN_HPP_NAMESPACE::Bool32 isText = {};
size_t dataSize = {};
void * pData = {};
};
template <>
struct CppType<StructureType, StructureType::ePipelineExecutableInternalRepresentationKHR>
{
using Type = PipelineExecutableInternalRepresentationKHR;
};
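// Note (editorial): dataSize / pData follow the usual two-call idiom twice over. The C entry
// point first reports how many representations exist; then, per element, a null pData makes the
// implementation write the required byte size into dataSize. A raw-API sketch, reusing the
// hypothetical `execInfo` from above (the wrapper converts implicitly to the C pointer type):
//
//   uint32_t count = 0;
//   vkGetPipelineExecutableInternalRepresentationsKHR( device, execInfo, &count, nullptr );
//   std::vector<VkPipelineExecutableInternalRepresentationKHR> reps(
//     count, { VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR } );
//   vkGetPipelineExecutableInternalRepresentationsKHR( device, execInfo, &count, reps.data() );
//   // each reps[i].dataSize now holds the size to allocate before querying reps[i].pData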
// wrapper struct for struct VkPipelineExecutablePropertiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineExecutablePropertiesKHR.html
struct PipelineExecutablePropertiesKHR
{
using NativeType = VkPipelineExecutablePropertiesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutablePropertiesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PipelineExecutablePropertiesKHR(VULKAN_HPP_NAMESPACE::ShaderStageFlags stages_ = {}, std::array<char,VK_MAX_DESCRIPTION_SIZE> const & name_ = {}, std::array<char,VK_MAX_DESCRIPTION_SIZE> const & description_ = {}, uint32_t subgroupSize_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, stages{ stages_ }, name{ name_ }, description{ description_ }, subgroupSize{ subgroupSize_ }
{}
VULKAN_HPP_CONSTEXPR_14 PipelineExecutablePropertiesKHR( PipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineExecutablePropertiesKHR( VkPipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineExecutablePropertiesKHR( *reinterpret_cast<PipelineExecutablePropertiesKHR const *>( &rhs ) )
{}
PipelineExecutablePropertiesKHR & operator=( PipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineExecutablePropertiesKHR & operator=( VkPipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR const *>( &rhs );
return *this;
}
operator VkPipelineExecutablePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineExecutablePropertiesKHR*>( this );
}
operator VkPipelineExecutablePropertiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineExecutablePropertiesKHR*>( this );
}
operator VkPipelineExecutablePropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineExecutablePropertiesKHR*>( this );
}
operator VkPipelineExecutablePropertiesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineExecutablePropertiesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ShaderStageFlags const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, stages, name, description, subgroupSize );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
std::strong_ordering operator<=>( PipelineExecutablePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
if ( auto cmp = stages <=> rhs.stages; cmp != 0 ) return cmp;
if ( auto cmp = strcmp( name, rhs.name ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
if ( auto cmp = strcmp( description, rhs.description ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
if ( auto cmp = subgroupSize <=> rhs.subgroupSize; cmp != 0 ) return cmp;
return std::strong_ordering::equivalent;
}
#endif
bool operator==( PipelineExecutablePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( stages == rhs.stages )
&& ( strcmp( name, rhs.name ) == 0 )
&& ( strcmp( description, rhs.description ) == 0 )
&& ( subgroupSize == rhs.subgroupSize );
}
bool operator!=( PipelineExecutablePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutablePropertiesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::ShaderStageFlags stages = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> name = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
uint32_t subgroupSize = {};
};
template <>
struct CppType<StructureType, StructureType::ePipelineExecutablePropertiesKHR>
{
using Type = PipelineExecutablePropertiesKHR;
};
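// Usage sketch (editorial comment): enumerating the executables of a pipeline. name and
// description are fixed-size char arrays wrapped in ArrayWrapper1D, so .data() yields a
// NUL-terminated string.
//
//   vk::PipelineInfoKHR pipelineInfo{};  // defined further down in this file
//   pipelineInfo.setPipeline( pipeline );
//   auto props = device.getPipelineExecutablePropertiesKHR( pipelineInfo );  // enhanced-mode helper
//   for ( vk::PipelineExecutablePropertiesKHR const & p : props )
//     std::printf( "%s: %s (subgroup size %u)\n", p.name.data(), p.description.data(), p.subgroupSize );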
// wrapper union for union VkPipelineExecutableStatisticValueKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineExecutableStatisticValueKHR.html
union PipelineExecutableStatisticValueKHR
{
using NativeType = VkPipelineExecutableStatisticValueKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR( VULKAN_HPP_NAMESPACE::Bool32 b32_ = {} )
: b32( b32_ )
{}
VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR( int64_t i64_ )
: i64( i64_ )
{}
VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR( uint64_t u64_ )
: u64( u64_ )
{}
VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR( double f64_ )
: f64( f64_ )
{}
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR & setB32( VULKAN_HPP_NAMESPACE::Bool32 b32_ ) VULKAN_HPP_NOEXCEPT
{
b32 = b32_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR & setI64( int64_t i64_ ) VULKAN_HPP_NOEXCEPT
{
i64 = i64_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR & setU64( uint64_t u64_ ) VULKAN_HPP_NOEXCEPT
{
u64 = u64_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR & setF64( double f64_ ) VULKAN_HPP_NOEXCEPT
{
f64 = f64_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPipelineExecutableStatisticValueKHR const &() const
{
return *reinterpret_cast<const VkPipelineExecutableStatisticValueKHR*>( this );
}
operator VkPipelineExecutableStatisticValueKHR &()
{
return *reinterpret_cast<VkPipelineExecutableStatisticValueKHR*>( this );
}
#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
VULKAN_HPP_NAMESPACE::Bool32 b32;
int64_t i64;
uint64_t u64;
double f64;
#else
VkBool32 b32;
int64_t i64;
uint64_t u64;
double f64;
#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
};
// wrapper struct for struct VkPipelineExecutableStatisticKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineExecutableStatisticKHR.html
struct PipelineExecutableStatisticKHR
{
using NativeType = VkPipelineExecutableStatisticKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutableStatisticKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticKHR(std::array<char,VK_MAX_DESCRIPTION_SIZE> const & name_ = {}, std::array<char,VK_MAX_DESCRIPTION_SIZE> const & description_ = {}, VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR format_ = VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR::eBool32, VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR value_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, name{ name_ }, description{ description_ }, format{ format_ }, value{ value_ }
{}
VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticKHR( PipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineExecutableStatisticKHR( VkPipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineExecutableStatisticKHR( *reinterpret_cast<PipelineExecutableStatisticKHR const *>( &rhs ) )
{}
PipelineExecutableStatisticKHR & operator=( PipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineExecutableStatisticKHR & operator=( VkPipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR const *>( &rhs );
return *this;
}
operator VkPipelineExecutableStatisticKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineExecutableStatisticKHR*>( this );
}
operator VkPipelineExecutableStatisticKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineExecutableStatisticKHR*>( this );
}
operator VkPipelineExecutableStatisticKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineExecutableStatisticKHR*>( this );
}
operator VkPipelineExecutableStatisticKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineExecutableStatisticKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &, VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR const &, VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, name, description, format, value );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableStatisticKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> name = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR format = VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR::eBool32;
VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR value = {};
};
template <>
struct CppType<StructureType, StructureType::ePipelineExecutableStatisticKHR>
{
using Type = PipelineExecutableStatisticKHR;
};
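// Note (editorial): `value` is a union, so PipelineExecutableStatisticKHR deliberately provides
// no operator== / operator<=>; the active member is selected by `format`. An illustrative read,
// continuing the hypothetical `stats` vector from the sketch further above:
//
//   for ( vk::PipelineExecutableStatisticKHR const & s : stats )
//   {
//     switch ( s.format )
//     {
//       case vk::PipelineExecutableStatisticFormatKHR::eBool32:  /* use s.value.b32 */ break;
//       case vk::PipelineExecutableStatisticFormatKHR::eInt64:   /* use s.value.i64 */ break;
//       case vk::PipelineExecutableStatisticFormatKHR::eUint64:  /* use s.value.u64 */ break;
//       case vk::PipelineExecutableStatisticFormatKHR::eFloat64: /* use s.value.f64 */ break;
//     }
//   }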
// wrapper struct for struct VkPipelineFragmentShadingRateEnumStateCreateInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineFragmentShadingRateEnumStateCreateInfoNV.html
struct PipelineFragmentShadingRateEnumStateCreateInfoNV
{
using NativeType = VkPipelineFragmentShadingRateEnumStateCreateInfoNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineFragmentShadingRateEnumStateCreateInfoNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV(VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV shadingRateType_ = VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV::eFragmentSize, VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate_ = VULKAN_HPP_NAMESPACE::FragmentShadingRateNV::e1InvocationPerPixel, std::array<VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR,2> const & combinerOps_ = { { VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep, VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep } }, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, shadingRateType{ shadingRateType_ }, shadingRate{ shadingRate_ }, combinerOps{ combinerOps_ }
{}
VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV( PipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineFragmentShadingRateEnumStateCreateInfoNV( VkPipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineFragmentShadingRateEnumStateCreateInfoNV( *reinterpret_cast<PipelineFragmentShadingRateEnumStateCreateInfoNV const *>( &rhs ) )
{}
PipelineFragmentShadingRateEnumStateCreateInfoNV & operator=( PipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineFragmentShadingRateEnumStateCreateInfoNV & operator=( VkPipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateEnumStateCreateInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV & setShadingRateType( VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV shadingRateType_ ) VULKAN_HPP_NOEXCEPT
{
shadingRateType = shadingRateType_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV & setShadingRate( VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate_ ) VULKAN_HPP_NOEXCEPT
{
shadingRate = shadingRate_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV & setCombinerOps( std::array<VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR,2> combinerOps_ ) VULKAN_HPP_NOEXCEPT
{
combinerOps = combinerOps_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPipelineFragmentShadingRateEnumStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineFragmentShadingRateEnumStateCreateInfoNV*>( this );
}
operator VkPipelineFragmentShadingRateEnumStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineFragmentShadingRateEnumStateCreateInfoNV*>( this );
}
operator VkPipelineFragmentShadingRateEnumStateCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineFragmentShadingRateEnumStateCreateInfoNV*>( this );
}
operator VkPipelineFragmentShadingRateEnumStateCreateInfoNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineFragmentShadingRateEnumStateCreateInfoNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV const &, VULKAN_HPP_NAMESPACE::FragmentShadingRateNV const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR, 2> const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, shadingRateType, shadingRate, combinerOps );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PipelineFragmentShadingRateEnumStateCreateInfoNV const & ) const = default;
#else
bool operator==( PipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( shadingRateType == rhs.shadingRateType )
&& ( shadingRate == rhs.shadingRate )
&& ( combinerOps == rhs.combinerOps );
#endif
}
bool operator!=( PipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineFragmentShadingRateEnumStateCreateInfoNV;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV shadingRateType = VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV::eFragmentSize;
VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate = VULKAN_HPP_NAMESPACE::FragmentShadingRateNV::e1InvocationPerPixel;
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR, 2> combinerOps = {};
};
template <>
struct CppType<StructureType, StructureType::ePipelineFragmentShadingRateEnumStateCreateInfoNV>
{
using Type = PipelineFragmentShadingRateEnumStateCreateInfoNV;
};
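// Usage sketch (editorial comment): with VK_NV_fragment_shading_rate_enums the rate is picked
// from an enum instead of a fragment size; combinerOps defaults to { eKeep, eKeep }.
//
//   vk::PipelineFragmentShadingRateEnumStateCreateInfoNV rateEnum{};
//   rateEnum.setShadingRateType( vk::FragmentShadingRateTypeNV::eEnums )
//           .setShadingRate( vk::FragmentShadingRateNV::e1InvocationPer2X2Pixels );
//   // chain rateEnum into VkGraphicsPipelineCreateInfo::pNext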
// wrapper struct for struct VkPipelineFragmentShadingRateStateCreateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineFragmentShadingRateStateCreateInfoKHR.html
struct PipelineFragmentShadingRateStateCreateInfoKHR
{
using NativeType = VkPipelineFragmentShadingRateStateCreateInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineFragmentShadingRateStateCreateInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR(VULKAN_HPP_NAMESPACE::Extent2D fragmentSize_ = {}, std::array<VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR,2> const & combinerOps_ = { { VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep, VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep } }, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, fragmentSize{ fragmentSize_ }, combinerOps{ combinerOps_ }
{}
VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR( PipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineFragmentShadingRateStateCreateInfoKHR( VkPipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineFragmentShadingRateStateCreateInfoKHR( *reinterpret_cast<PipelineFragmentShadingRateStateCreateInfoKHR const *>( &rhs ) )
{}
PipelineFragmentShadingRateStateCreateInfoKHR & operator=( PipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineFragmentShadingRateStateCreateInfoKHR & operator=( VkPipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateStateCreateInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR & setFragmentSize( VULKAN_HPP_NAMESPACE::Extent2D const & fragmentSize_ ) VULKAN_HPP_NOEXCEPT
{
fragmentSize = fragmentSize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR & setCombinerOps( std::array<VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR,2> combinerOps_ ) VULKAN_HPP_NOEXCEPT
{
combinerOps = combinerOps_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPipelineFragmentShadingRateStateCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineFragmentShadingRateStateCreateInfoKHR*>( this );
}
operator VkPipelineFragmentShadingRateStateCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineFragmentShadingRateStateCreateInfoKHR*>( this );
}
operator VkPipelineFragmentShadingRateStateCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineFragmentShadingRateStateCreateInfoKHR*>( this );
}
operator VkPipelineFragmentShadingRateStateCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineFragmentShadingRateStateCreateInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR, 2> const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, fragmentSize, combinerOps );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PipelineFragmentShadingRateStateCreateInfoKHR const & ) const = default;
#else
bool operator==( PipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( fragmentSize == rhs.fragmentSize )
&& ( combinerOps == rhs.combinerOps );
#endif
}
bool operator!=( PipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineFragmentShadingRateStateCreateInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Extent2D fragmentSize = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR, 2> combinerOps = {};
};
template <>
struct CppType<StructureType, StructureType::ePipelineFragmentShadingRateStateCreateInfoKHR>
{
using Type = PipelineFragmentShadingRateStateCreateInfoKHR;
};
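// Usage sketch (editorial comment): the KHR variant expresses the pipeline rate as a fragment
// size; the two combinerOps say how it is merged with the primitive and attachment rates.
//
//   vk::PipelineFragmentShadingRateStateCreateInfoKHR rateState{};
//   rateState.setFragmentSize( { 2, 2 } )  // 2x2 pixels per fragment shader invocation
//            .setCombinerOps( { vk::FragmentShadingRateCombinerOpKHR::eKeep,
//                               vk::FragmentShadingRateCombinerOpKHR::eKeep } );
//   // chain rateState into VkGraphicsPipelineCreateInfo::pNext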
// wrapper struct for struct VkPipelineIndirectDeviceAddressInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineIndirectDeviceAddressInfoNV.html
struct PipelineIndirectDeviceAddressInfoNV
{
using NativeType = VkPipelineIndirectDeviceAddressInfoNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineIndirectDeviceAddressInfoNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineIndirectDeviceAddressInfoNV(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pipelineBindPoint{ pipelineBindPoint_ }, pipeline{ pipeline_ }
{}
VULKAN_HPP_CONSTEXPR PipelineIndirectDeviceAddressInfoNV( PipelineIndirectDeviceAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineIndirectDeviceAddressInfoNV( VkPipelineIndirectDeviceAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineIndirectDeviceAddressInfoNV( *reinterpret_cast<PipelineIndirectDeviceAddressInfoNV const *>( &rhs ) )
{}
PipelineIndirectDeviceAddressInfoNV & operator=( PipelineIndirectDeviceAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineIndirectDeviceAddressInfoNV & operator=( VkPipelineIndirectDeviceAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PipelineIndirectDeviceAddressInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineIndirectDeviceAddressInfoNV & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
{
pipelineBindPoint = pipelineBindPoint_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineIndirectDeviceAddressInfoNV & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
{
pipeline = pipeline_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPipelineIndirectDeviceAddressInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineIndirectDeviceAddressInfoNV*>( this );
}
operator VkPipelineIndirectDeviceAddressInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineIndirectDeviceAddressInfoNV*>( this );
}
operator VkPipelineIndirectDeviceAddressInfoNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineIndirectDeviceAddressInfoNV*>( this );
}
operator VkPipelineIndirectDeviceAddressInfoNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineIndirectDeviceAddressInfoNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineBindPoint const &, VULKAN_HPP_NAMESPACE::Pipeline const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pipelineBindPoint, pipeline );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PipelineIndirectDeviceAddressInfoNV const & ) const = default;
#else
bool operator==( PipelineIndirectDeviceAddressInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pipelineBindPoint == rhs.pipelineBindPoint )
&& ( pipeline == rhs.pipeline );
#endif
}
bool operator!=( PipelineIndirectDeviceAddressInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineIndirectDeviceAddressInfoNV;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
VULKAN_HPP_NAMESPACE::Pipeline pipeline = {};
};
template <>
struct CppType<StructureType, StructureType::ePipelineIndirectDeviceAddressInfoNV>
{
using Type = PipelineIndirectDeviceAddressInfoNV;
};
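// Usage sketch (editorial comment, raw C entry point of VK_NV_device_generated_commands_compute):
// the pointer-conversion operators above let the wrapper be passed where the C API expects a
// const VkPipelineIndirectDeviceAddressInfoNV *.
//
//   vk::PipelineIndirectDeviceAddressInfoNV addrInfo{};
//   addrInfo.setPipelineBindPoint( vk::PipelineBindPoint::eCompute ).setPipeline( pipeline );
//   VkDeviceAddress addr = vkGetPipelineIndirectDeviceAddressNV( device, addrInfo );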
// wrapper struct for struct VkPipelineInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineInfoKHR.html
struct PipelineInfoKHR
{
using NativeType = VkPipelineInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineInfoKHR(VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pipeline{ pipeline_ }
{}
VULKAN_HPP_CONSTEXPR PipelineInfoKHR( PipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineInfoKHR( VkPipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineInfoKHR( *reinterpret_cast<PipelineInfoKHR const *>( &rhs ) )
{}
PipelineInfoKHR & operator=( PipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineInfoKHR & operator=( VkPipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PipelineInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineInfoKHR & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
{
pipeline = pipeline_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPipelineInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineInfoKHR*>( this );
}
operator VkPipelineInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineInfoKHR*>( this );
}
operator VkPipelineInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineInfoKHR*>( this );
}
operator VkPipelineInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Pipeline const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pipeline );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PipelineInfoKHR const & ) const = default;
#else
bool operator==( PipelineInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pipeline == rhs.pipeline );
#endif
}
bool operator!=( PipelineInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Pipeline pipeline = {};
};
template <>
struct CppType<StructureType, StructureType::ePipelineInfoKHR>
{
using Type = PipelineInfoKHR;
};
using PipelineInfoEXT = PipelineInfoKHR;
// wrapper struct for struct VkPipelineLayoutCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineLayoutCreateInfo.html
struct PipelineLayoutCreateInfo
{
using NativeType = VkPipelineLayoutCreateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineLayoutCreateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineLayoutCreateInfo(VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_ = {}, uint32_t setLayoutCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ = {}, uint32_t pushConstantRangeCount_ = {}, const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, setLayoutCount{ setLayoutCount_ }, pSetLayouts{ pSetLayouts_ }, pushConstantRangeCount{ pushConstantRangeCount_ }, pPushConstantRanges{ pPushConstantRanges_ }
{}
VULKAN_HPP_CONSTEXPR PipelineLayoutCreateInfo( PipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineLayoutCreateInfo( VkPipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineLayoutCreateInfo( *reinterpret_cast<PipelineLayoutCreateInfo const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineLayoutCreateInfo( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayout> const & setLayouts_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PushConstantRange> const & pushConstantRanges_ = {}, const void * pNext_ = nullptr )
: pNext( pNext_ ), flags( flags_ ), setLayoutCount( static_cast<uint32_t>( setLayouts_.size() ) ), pSetLayouts( setLayouts_.data() ), pushConstantRangeCount( static_cast<uint32_t>( pushConstantRanges_.size() ) ), pPushConstantRanges( pushConstantRanges_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
PipelineLayoutCreateInfo & operator=( PipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineLayoutCreateInfo & operator=( VkPipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & setSetLayoutCount( uint32_t setLayoutCount_ ) VULKAN_HPP_NOEXCEPT
{
setLayoutCount = setLayoutCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ ) VULKAN_HPP_NOEXCEPT
{
pSetLayouts = pSetLayouts_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineLayoutCreateInfo & setSetLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayout> const & setLayouts_ ) VULKAN_HPP_NOEXCEPT
{
setLayoutCount = static_cast<uint32_t>( setLayouts_.size() );
pSetLayouts = setLayouts_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & setPushConstantRangeCount( uint32_t pushConstantRangeCount_ ) VULKAN_HPP_NOEXCEPT
{
pushConstantRangeCount = pushConstantRangeCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & setPPushConstantRanges( const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges_ ) VULKAN_HPP_NOEXCEPT
{
pPushConstantRanges = pPushConstantRanges_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineLayoutCreateInfo & setPushConstantRanges( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PushConstantRange> const & pushConstantRanges_ ) VULKAN_HPP_NOEXCEPT
{
pushConstantRangeCount = static_cast<uint32_t>( pushConstantRanges_.size() );
pPushConstantRanges = pushConstantRanges_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPipelineLayoutCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineLayoutCreateInfo*>( this );
}
operator VkPipelineLayoutCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineLayoutCreateInfo*>( this );
}
operator VkPipelineLayoutCreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineLayoutCreateInfo*>( this );
}
operator VkPipelineLayoutCreateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineLayoutCreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::PushConstantRange * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, setLayoutCount, pSetLayouts, pushConstantRangeCount, pPushConstantRanges );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PipelineLayoutCreateInfo const & ) const = default;
#else
bool operator==( PipelineLayoutCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( setLayoutCount == rhs.setLayoutCount )
&& ( pSetLayouts == rhs.pSetLayouts )
&& ( pushConstantRangeCount == rhs.pushConstantRangeCount )
&& ( pPushConstantRanges == rhs.pPushConstantRanges );
#endif
}
bool operator!=( PipelineLayoutCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineLayoutCreateInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags = {};
uint32_t setLayoutCount = {};
const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts = {};
uint32_t pushConstantRangeCount = {};
const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges = {};
};
template <>
struct CppType<StructureType, StructureType::ePipelineLayoutCreateInfo>
{
using Type = PipelineLayoutCreateInfo;
};
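// Usage sketch (editorial comment, assuming enhanced mode and exceptions enabled): the
// ArrayProxy constructor fills both count/pointer pairs, and a single lvalue such as pcRange is
// accepted as a one-element range. layout0 and layout1 are hypothetical descriptor set layouts.
//
//   std::array<vk::DescriptorSetLayout, 2> setLayouts = { layout0, layout1 };
//   vk::PushConstantRange pcRange{ vk::ShaderStageFlagBits::eVertex, 0, 16 };
//   vk::PipelineLayoutCreateInfo layoutInfo{ {}, setLayouts, pcRange };
//   vk::PipelineLayout pipelineLayout = device.createPipelineLayout( layoutInfo );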
// wrapper struct for struct VkPipelinePropertiesIdentifierEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelinePropertiesIdentifierEXT.html
struct PipelinePropertiesIdentifierEXT
{
using NativeType = VkPipelinePropertiesIdentifierEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelinePropertiesIdentifierEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PipelinePropertiesIdentifierEXT(std::array<uint8_t,VK_UUID_SIZE> const & pipelineIdentifier_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pipelineIdentifier{ pipelineIdentifier_ }
{}
VULKAN_HPP_CONSTEXPR_14 PipelinePropertiesIdentifierEXT( PipelinePropertiesIdentifierEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelinePropertiesIdentifierEXT( VkPipelinePropertiesIdentifierEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelinePropertiesIdentifierEXT( *reinterpret_cast<PipelinePropertiesIdentifierEXT const *>( &rhs ) )
{}
PipelinePropertiesIdentifierEXT & operator=( PipelinePropertiesIdentifierEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelinePropertiesIdentifierEXT & operator=( VkPipelinePropertiesIdentifierEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelinePropertiesIdentifierEXT const *>( &rhs );
return *this;
}
operator VkPipelinePropertiesIdentifierEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelinePropertiesIdentifierEXT*>( this );
}
operator VkPipelinePropertiesIdentifierEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelinePropertiesIdentifierEXT*>( this );
}
operator VkPipelinePropertiesIdentifierEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelinePropertiesIdentifierEXT*>( this );
}
operator VkPipelinePropertiesIdentifierEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelinePropertiesIdentifierEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pipelineIdentifier );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PipelinePropertiesIdentifierEXT const & ) const = default;
#else
bool operator==( PipelinePropertiesIdentifierEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pipelineIdentifier == rhs.pipelineIdentifier );
#endif
}
bool operator!=( PipelinePropertiesIdentifierEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelinePropertiesIdentifierEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> pipelineIdentifier = {};
};
template <>
struct CppType<StructureType, StructureType::ePipelinePropertiesIdentifierEXT>
{
using Type = PipelinePropertiesIdentifierEXT;
};
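// Usage sketch (editorial comment, raw C entry point of VK_EXT_pipeline_properties): the
// identifier is returned through a VkBaseOutStructure pointer, so the wrapper is passed by
// address and cast; `pipelineInfo` is the hypothetical vk::PipelineInfoKHR from further above.
//
//   vk::PipelinePropertiesIdentifierEXT ident{};
//   vkGetPipelinePropertiesEXT( device, pipelineInfo,
//                               reinterpret_cast<VkBaseOutStructure *>( &ident ) );
//   // ident.pipelineIdentifier now holds the VK_UUID_SIZE-byte pipeline UUID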
// wrapper struct for struct VkPipelineRasterizationConservativeStateCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineRasterizationConservativeStateCreateInfoEXT.html
struct PipelineRasterizationConservativeStateCreateInfoEXT
{
using NativeType = VkPipelineRasterizationConservativeStateCreateInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineRasterizationConservativeStateCreateInfoEXT(VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags_ = {}, VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode_ = VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT::eDisabled, float extraPrimitiveOverestimationSize_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, conservativeRasterizationMode{ conservativeRasterizationMode_ }, extraPrimitiveOverestimationSize{ extraPrimitiveOverestimationSize_ }
{}
VULKAN_HPP_CONSTEXPR PipelineRasterizationConservativeStateCreateInfoEXT( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineRasterizationConservativeStateCreateInfoEXT( VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineRasterizationConservativeStateCreateInfoEXT( *reinterpret_cast<PipelineRasterizationConservativeStateCreateInfoEXT const *>( &rhs ) )
{}
PipelineRasterizationConservativeStateCreateInfoEXT & operator=( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineRasterizationConservativeStateCreateInfoEXT & operator=( VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationConservativeStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationConservativeStateCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationConservativeStateCreateInfoEXT & setConservativeRasterizationMode( VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode_ ) VULKAN_HPP_NOEXCEPT
{
conservativeRasterizationMode = conservativeRasterizationMode_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationConservativeStateCreateInfoEXT & setExtraPrimitiveOverestimationSize( float extraPrimitiveOverestimationSize_ ) VULKAN_HPP_NOEXCEPT
{
extraPrimitiveOverestimationSize = extraPrimitiveOverestimationSize_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPipelineRasterizationConservativeStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineRasterizationConservativeStateCreateInfoEXT*>( this );
}
operator VkPipelineRasterizationConservativeStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineRasterizationConservativeStateCreateInfoEXT*>( this );
}
operator VkPipelineRasterizationConservativeStateCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineRasterizationConservativeStateCreateInfoEXT*>( this );
}
operator VkPipelineRasterizationConservativeStateCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineRasterizationConservativeStateCreateInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT const &, VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT const &, float const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, conservativeRasterizationMode, extraPrimitiveOverestimationSize );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PipelineRasterizationConservativeStateCreateInfoEXT const & ) const = default;
#else
bool operator==( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( conservativeRasterizationMode == rhs.conservativeRasterizationMode )
&& ( extraPrimitiveOverestimationSize == rhs.extraPrimitiveOverestimationSize );
#endif
}
bool operator!=( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags = {};
VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode = VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT::eDisabled;
float extraPrimitiveOverestimationSize = {};
};
template <>
struct CppType<StructureType, StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT>
{
using Type = PipelineRasterizationConservativeStateCreateInfoEXT;
};
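// Example usage (illustrative sketch, not generated code; assumes the common `vk` namespace alias
// and a hypothetical `rasterizationState` local of type vk::PipelineRasterizationStateCreateInfo):
//   vk::PipelineRasterizationConservativeStateCreateInfoEXT conservativeState{};
//   conservativeState.setConservativeRasterizationMode( vk::ConservativeRasterizationModeEXT::eOverestimate )
//                    .setExtraPrimitiveOverestimationSize( 0.0f );
//   rasterizationState.setPNext( &conservativeState );  // extends VkPipelineRasterizationStateCreateInfo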
// wrapper struct for struct VkPipelineRasterizationDepthClipStateCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineRasterizationDepthClipStateCreateInfoEXT.html
struct PipelineRasterizationDepthClipStateCreateInfoEXT
{
using NativeType = VkPipelineRasterizationDepthClipStateCreateInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineRasterizationDepthClipStateCreateInfoEXT(VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, depthClipEnable{ depthClipEnable_ }
{}
VULKAN_HPP_CONSTEXPR PipelineRasterizationDepthClipStateCreateInfoEXT( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineRasterizationDepthClipStateCreateInfoEXT( VkPipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineRasterizationDepthClipStateCreateInfoEXT( *reinterpret_cast<PipelineRasterizationDepthClipStateCreateInfoEXT const *>( &rhs ) )
{}
PipelineRasterizationDepthClipStateCreateInfoEXT & operator=( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineRasterizationDepthClipStateCreateInfoEXT & operator=( VkPipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationDepthClipStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationDepthClipStateCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationDepthClipStateCreateInfoEXT & setDepthClipEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ ) VULKAN_HPP_NOEXCEPT
{
depthClipEnable = depthClipEnable_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPipelineRasterizationDepthClipStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineRasterizationDepthClipStateCreateInfoEXT*>( this );
}
operator VkPipelineRasterizationDepthClipStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineRasterizationDepthClipStateCreateInfoEXT*>( this );
}
operator VkPipelineRasterizationDepthClipStateCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineRasterizationDepthClipStateCreateInfoEXT*>( this );
}
operator VkPipelineRasterizationDepthClipStateCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineRasterizationDepthClipStateCreateInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, depthClipEnable );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PipelineRasterizationDepthClipStateCreateInfoEXT const & ) const = default;
#else
bool operator==( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( depthClipEnable == rhs.depthClipEnable );
#endif
}
bool operator!=( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags = {};
VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable = {};
};
template <>
struct CppType<StructureType, StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT>
{
using Type = PipelineRasterizationDepthClipStateCreateInfoEXT;
};
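// Example usage (illustrative sketch; `rasterizationState` is a hypothetical
// vk::PipelineRasterizationStateCreateInfo local):
//   vk::PipelineRasterizationDepthClipStateCreateInfoEXT depthClipState{};
//   depthClipState.setDepthClipEnable( VK_FALSE );   // disable clipping against the near/far planes
//   rasterizationState.setPNext( &depthClipState );  // extends VkPipelineRasterizationStateCreateInfo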
// wrapper struct for struct VkPipelineRasterizationLineStateCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineRasterizationLineStateCreateInfo.html
struct PipelineRasterizationLineStateCreateInfo
{
using NativeType = VkPipelineRasterizationLineStateCreateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationLineStateCreateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineRasterizationLineStateCreateInfo(VULKAN_HPP_NAMESPACE::LineRasterizationMode lineRasterizationMode_ = VULKAN_HPP_NAMESPACE::LineRasterizationMode::eDefault, VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable_ = {}, uint32_t lineStippleFactor_ = {}, uint16_t lineStipplePattern_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, lineRasterizationMode{ lineRasterizationMode_ }, stippledLineEnable{ stippledLineEnable_ }, lineStippleFactor{ lineStippleFactor_ }, lineStipplePattern{ lineStipplePattern_ }
{}
VULKAN_HPP_CONSTEXPR PipelineRasterizationLineStateCreateInfo( PipelineRasterizationLineStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineRasterizationLineStateCreateInfo( VkPipelineRasterizationLineStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineRasterizationLineStateCreateInfo( *reinterpret_cast<PipelineRasterizationLineStateCreateInfo const *>( &rhs ) )
{}
PipelineRasterizationLineStateCreateInfo & operator=( PipelineRasterizationLineStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineRasterizationLineStateCreateInfo & operator=( VkPipelineRasterizationLineStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfo & setLineRasterizationMode( VULKAN_HPP_NAMESPACE::LineRasterizationMode lineRasterizationMode_ ) VULKAN_HPP_NOEXCEPT
{
lineRasterizationMode = lineRasterizationMode_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfo & setStippledLineEnable( VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable_ ) VULKAN_HPP_NOEXCEPT
{
stippledLineEnable = stippledLineEnable_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfo & setLineStippleFactor( uint32_t lineStippleFactor_ ) VULKAN_HPP_NOEXCEPT
{
lineStippleFactor = lineStippleFactor_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfo & setLineStipplePattern( uint16_t lineStipplePattern_ ) VULKAN_HPP_NOEXCEPT
{
lineStipplePattern = lineStipplePattern_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPipelineRasterizationLineStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineRasterizationLineStateCreateInfo*>( this );
}
operator VkPipelineRasterizationLineStateCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineRasterizationLineStateCreateInfo*>( this );
}
operator VkPipelineRasterizationLineStateCreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineRasterizationLineStateCreateInfo*>( this );
}
operator VkPipelineRasterizationLineStateCreateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineRasterizationLineStateCreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::LineRasterizationMode const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint32_t const &, uint16_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, lineRasterizationMode, stippledLineEnable, lineStippleFactor, lineStipplePattern );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PipelineRasterizationLineStateCreateInfo const & ) const = default;
#else
bool operator==( PipelineRasterizationLineStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( lineRasterizationMode == rhs.lineRasterizationMode )
&& ( stippledLineEnable == rhs.stippledLineEnable )
&& ( lineStippleFactor == rhs.lineStippleFactor )
&& ( lineStipplePattern == rhs.lineStipplePattern );
#endif
}
bool operator!=( PipelineRasterizationLineStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationLineStateCreateInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::LineRasterizationMode lineRasterizationMode = VULKAN_HPP_NAMESPACE::LineRasterizationMode::eDefault;
VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable = {};
uint32_t lineStippleFactor = {};
uint16_t lineStipplePattern = {};
};
template <>
struct CppType<StructureType, StructureType::ePipelineRasterizationLineStateCreateInfo>
{
using Type = PipelineRasterizationLineStateCreateInfo;
};
using PipelineRasterizationLineStateCreateInfoEXT = PipelineRasterizationLineStateCreateInfo;
using PipelineRasterizationLineStateCreateInfoKHR = PipelineRasterizationLineStateCreateInfo;
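// Example usage (illustrative sketch; chained into a hypothetical `rasterizationState` local of
// type vk::PipelineRasterizationStateCreateInfo):
//   vk::PipelineRasterizationLineStateCreateInfo lineState{};
//   lineState.setLineRasterizationMode( vk::LineRasterizationMode::eBresenham )
//            .setStippledLineEnable( VK_TRUE )
//            .setLineStippleFactor( 1 )
//            .setLineStipplePattern( 0xAAAA );  // 16-bit repeating on/off pattern
//   rasterizationState.setPNext( &lineState );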
// wrapper struct for struct VkPipelineRasterizationProvokingVertexStateCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineRasterizationProvokingVertexStateCreateInfoEXT.html
struct PipelineRasterizationProvokingVertexStateCreateInfoEXT
{
using NativeType = VkPipelineRasterizationProvokingVertexStateCreateInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationProvokingVertexStateCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineRasterizationProvokingVertexStateCreateInfoEXT(VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode_ = VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT::eFirstVertex, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, provokingVertexMode{ provokingVertexMode_ }
{}
VULKAN_HPP_CONSTEXPR PipelineRasterizationProvokingVertexStateCreateInfoEXT( PipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineRasterizationProvokingVertexStateCreateInfoEXT( VkPipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineRasterizationProvokingVertexStateCreateInfoEXT( *reinterpret_cast<PipelineRasterizationProvokingVertexStateCreateInfoEXT const *>( &rhs ) )
{}
PipelineRasterizationProvokingVertexStateCreateInfoEXT & operator=( PipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineRasterizationProvokingVertexStateCreateInfoEXT & operator=( VkPipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationProvokingVertexStateCreateInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationProvokingVertexStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationProvokingVertexStateCreateInfoEXT & setProvokingVertexMode( VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode_ ) VULKAN_HPP_NOEXCEPT
{
provokingVertexMode = provokingVertexMode_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPipelineRasterizationProvokingVertexStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineRasterizationProvokingVertexStateCreateInfoEXT*>( this );
}
operator VkPipelineRasterizationProvokingVertexStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineRasterizationProvokingVertexStateCreateInfoEXT*>( this );
}
operator VkPipelineRasterizationProvokingVertexStateCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineRasterizationProvokingVertexStateCreateInfoEXT*>( this );
}
operator VkPipelineRasterizationProvokingVertexStateCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineRasterizationProvokingVertexStateCreateInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, provokingVertexMode );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PipelineRasterizationProvokingVertexStateCreateInfoEXT const & ) const = default;
#else
bool operator==( PipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( provokingVertexMode == rhs.provokingVertexMode );
#endif
}
bool operator!=( PipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationProvokingVertexStateCreateInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode = VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT::eFirstVertex;
};
template <>
struct CppType<StructureType, StructureType::ePipelineRasterizationProvokingVertexStateCreateInfoEXT>
{
using Type = PipelineRasterizationProvokingVertexStateCreateInfoEXT;
};
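// Example usage (illustrative sketch; `rasterizationState` is a hypothetical
// vk::PipelineRasterizationStateCreateInfo local):
//   vk::PipelineRasterizationProvokingVertexStateCreateInfoEXT provokingVertexState{};
//   provokingVertexState.setProvokingVertexMode( vk::ProvokingVertexModeEXT::eLastVertex );
//   rasterizationState.setPNext( &provokingVertexState );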
// wrapper struct for struct VkPipelineRasterizationStateRasterizationOrderAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineRasterizationStateRasterizationOrderAMD.html
struct PipelineRasterizationStateRasterizationOrderAMD
{
using NativeType = VkPipelineRasterizationStateRasterizationOrderAMD;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationStateRasterizationOrderAMD;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineRasterizationStateRasterizationOrderAMD(VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder_ = VULKAN_HPP_NAMESPACE::RasterizationOrderAMD::eStrict, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, rasterizationOrder{ rasterizationOrder_ }
{}
VULKAN_HPP_CONSTEXPR PipelineRasterizationStateRasterizationOrderAMD( PipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineRasterizationStateRasterizationOrderAMD( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineRasterizationStateRasterizationOrderAMD( *reinterpret_cast<PipelineRasterizationStateRasterizationOrderAMD const *>( &rhs ) )
{}
PipelineRasterizationStateRasterizationOrderAMD & operator=( PipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineRasterizationStateRasterizationOrderAMD & operator=( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateRasterizationOrderAMD const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateRasterizationOrderAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateRasterizationOrderAMD & setRasterizationOrder( VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder_ ) VULKAN_HPP_NOEXCEPT
{
rasterizationOrder = rasterizationOrder_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPipelineRasterizationStateRasterizationOrderAMD const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineRasterizationStateRasterizationOrderAMD*>( this );
}
operator VkPipelineRasterizationStateRasterizationOrderAMD &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineRasterizationStateRasterizationOrderAMD*>( this );
}
operator VkPipelineRasterizationStateRasterizationOrderAMD const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineRasterizationStateRasterizationOrderAMD*>( this );
}
operator VkPipelineRasterizationStateRasterizationOrderAMD *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineRasterizationStateRasterizationOrderAMD*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::RasterizationOrderAMD const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, rasterizationOrder );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PipelineRasterizationStateRasterizationOrderAMD const & ) const = default;
#else
bool operator==( PipelineRasterizationStateRasterizationOrderAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( rasterizationOrder == rhs.rasterizationOrder );
#endif
}
bool operator!=( PipelineRasterizationStateRasterizationOrderAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateRasterizationOrderAMD;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder = VULKAN_HPP_NAMESPACE::RasterizationOrderAMD::eStrict;
};
template <>
struct CppType<StructureType, StructureType::ePipelineRasterizationStateRasterizationOrderAMD>
{
using Type = PipelineRasterizationStateRasterizationOrderAMD;
};
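// Example usage (illustrative sketch; `rasterizationState` is a hypothetical
// vk::PipelineRasterizationStateCreateInfo local):
//   vk::PipelineRasterizationStateRasterizationOrderAMD rasterizationOrder{};
//   rasterizationOrder.setRasterizationOrder( vk::RasterizationOrderAMD::eRelaxed );
//   rasterizationState.setPNext( &rasterizationOrder );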
// wrapper struct for struct VkPipelineRasterizationStateStreamCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineRasterizationStateStreamCreateInfoEXT.html
struct PipelineRasterizationStateStreamCreateInfoEXT
{
using NativeType = VkPipelineRasterizationStateStreamCreateInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationStateStreamCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineRasterizationStateStreamCreateInfoEXT(VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags_ = {}, uint32_t rasterizationStream_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, rasterizationStream{ rasterizationStream_ }
{}
VULKAN_HPP_CONSTEXPR PipelineRasterizationStateStreamCreateInfoEXT( PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineRasterizationStateStreamCreateInfoEXT( VkPipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineRasterizationStateStreamCreateInfoEXT( *reinterpret_cast<PipelineRasterizationStateStreamCreateInfoEXT const *>( &rhs ) )
{}
PipelineRasterizationStateStreamCreateInfoEXT & operator=( PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineRasterizationStateStreamCreateInfoEXT & operator=( VkPipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateStreamCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateStreamCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateStreamCreateInfoEXT & setRasterizationStream( uint32_t rasterizationStream_ ) VULKAN_HPP_NOEXCEPT
{
rasterizationStream = rasterizationStream_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPipelineRasterizationStateStreamCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineRasterizationStateStreamCreateInfoEXT*>( this );
}
operator VkPipelineRasterizationStateStreamCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineRasterizationStateStreamCreateInfoEXT*>( this );
}
operator VkPipelineRasterizationStateStreamCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineRasterizationStateStreamCreateInfoEXT*>( this );
}
operator VkPipelineRasterizationStateStreamCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineRasterizationStateStreamCreateInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, rasterizationStream );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PipelineRasterizationStateStreamCreateInfoEXT const & ) const = default;
#else
bool operator==( PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( rasterizationStream == rhs.rasterizationStream );
#endif
}
bool operator!=( PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateStreamCreateInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags = {};
uint32_t rasterizationStream = {};
};
template <>
struct CppType<StructureType, StructureType::ePipelineRasterizationStateStreamCreateInfoEXT>
{
using Type = PipelineRasterizationStateStreamCreateInfoEXT;
};
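// Example usage (illustrative sketch for transform feedback; `rasterizationState` is a
// hypothetical vk::PipelineRasterizationStateCreateInfo local):
//   vk::PipelineRasterizationStateStreamCreateInfoEXT streamState{};
//   streamState.setRasterizationStream( 0 );  // select which vertex stream is rasterized
//   rasterizationState.setPNext( &streamState );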
// wrapper struct for struct VkPipelineRenderingCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineRenderingCreateInfo.html
struct PipelineRenderingCreateInfo
{
using NativeType = VkPipelineRenderingCreateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRenderingCreateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineRenderingCreateInfo(uint32_t viewMask_ = {}, uint32_t colorAttachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats_ = {}, VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, viewMask{ viewMask_ }, colorAttachmentCount{ colorAttachmentCount_ }, pColorAttachmentFormats{ pColorAttachmentFormats_ }, depthAttachmentFormat{ depthAttachmentFormat_ }, stencilAttachmentFormat{ stencilAttachmentFormat_ }
{}
VULKAN_HPP_CONSTEXPR PipelineRenderingCreateInfo( PipelineRenderingCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineRenderingCreateInfo( VkPipelineRenderingCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineRenderingCreateInfo( *reinterpret_cast<PipelineRenderingCreateInfo const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineRenderingCreateInfo( uint32_t viewMask_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & colorAttachmentFormats_, VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, const void * pNext_ = nullptr )
: pNext( pNext_ ), viewMask( viewMask_ ), colorAttachmentCount( static_cast<uint32_t>( colorAttachmentFormats_.size() ) ), pColorAttachmentFormats( colorAttachmentFormats_.data() ), depthAttachmentFormat( depthAttachmentFormat_ ), stencilAttachmentFormat( stencilAttachmentFormat_ )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
PipelineRenderingCreateInfo & operator=( PipelineRenderingCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineRenderingCreateInfo & operator=( VkPipelineRenderingCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRenderingCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT
{
viewMask = viewMask_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
{
colorAttachmentCount = colorAttachmentCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo & setPColorAttachmentFormats( const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT
{
pColorAttachmentFormats = pColorAttachmentFormats_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineRenderingCreateInfo & setColorAttachmentFormats( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & colorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT
{
colorAttachmentCount = static_cast<uint32_t>( colorAttachmentFormats_.size() );
pColorAttachmentFormats = colorAttachmentFormats_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo & setDepthAttachmentFormat( VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT
{
depthAttachmentFormat = depthAttachmentFormat_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo & setStencilAttachmentFormat( VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT
{
stencilAttachmentFormat = stencilAttachmentFormat_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPipelineRenderingCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineRenderingCreateInfo*>( this );
}
operator VkPipelineRenderingCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineRenderingCreateInfo*>( this );
}
operator VkPipelineRenderingCreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineRenderingCreateInfo*>( this );
}
operator VkPipelineRenderingCreateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineRenderingCreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Format * const &, VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::Format const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, viewMask, colorAttachmentCount, pColorAttachmentFormats, depthAttachmentFormat, stencilAttachmentFormat );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PipelineRenderingCreateInfo const & ) const = default;
#else
bool operator==( PipelineRenderingCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( viewMask == rhs.viewMask )
&& ( colorAttachmentCount == rhs.colorAttachmentCount )
&& ( pColorAttachmentFormats == rhs.pColorAttachmentFormats )
&& ( depthAttachmentFormat == rhs.depthAttachmentFormat )
&& ( stencilAttachmentFormat == rhs.stencilAttachmentFormat );
#endif
}
bool operator!=( PipelineRenderingCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRenderingCreateInfo;
const void * pNext = {};
uint32_t viewMask = {};
uint32_t colorAttachmentCount = {};
const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats = {};
VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
};
template <>
struct CppType<StructureType, StructureType::ePipelineRenderingCreateInfo>
{
using Type = PipelineRenderingCreateInfo;
};
using PipelineRenderingCreateInfoKHR = PipelineRenderingCreateInfo;
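// Example usage (illustrative sketch for dynamic rendering; `graphicsPipelineCreateInfo` is a
// hypothetical vk::GraphicsPipelineCreateInfo local, and `colorFormats` must outlive its use):
//   std::array<vk::Format, 1> colorFormats{ vk::Format::eB8G8R8A8Unorm };
//   vk::PipelineRenderingCreateInfo renderingInfo{};
//   renderingInfo.setColorAttachmentFormats( colorFormats )  // also sets colorAttachmentCount
//                .setDepthAttachmentFormat( vk::Format::eD32Sfloat );
//   graphicsPipelineCreateInfo.setPNext( &renderingInfo );   // replaces a render pass / subpass pair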
// wrapper struct for struct VkPipelineRepresentativeFragmentTestStateCreateInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineRepresentativeFragmentTestStateCreateInfoNV.html
struct PipelineRepresentativeFragmentTestStateCreateInfoNV
{
using NativeType = VkPipelineRepresentativeFragmentTestStateCreateInfoNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineRepresentativeFragmentTestStateCreateInfoNV(VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, representativeFragmentTestEnable{ representativeFragmentTestEnable_ }
{}
VULKAN_HPP_CONSTEXPR PipelineRepresentativeFragmentTestStateCreateInfoNV( PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineRepresentativeFragmentTestStateCreateInfoNV( VkPipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineRepresentativeFragmentTestStateCreateInfoNV( *reinterpret_cast<PipelineRepresentativeFragmentTestStateCreateInfoNV const *>( &rhs ) )
{}
PipelineRepresentativeFragmentTestStateCreateInfoNV & operator=( PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineRepresentativeFragmentTestStateCreateInfoNV & operator=( VkPipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRepresentativeFragmentTestStateCreateInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PipelineRepresentativeFragmentTestStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineRepresentativeFragmentTestStateCreateInfoNV & setRepresentativeFragmentTestEnable( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable_ ) VULKAN_HPP_NOEXCEPT
{
representativeFragmentTestEnable = representativeFragmentTestEnable_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPipelineRepresentativeFragmentTestStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineRepresentativeFragmentTestStateCreateInfoNV*>( this );
}
operator VkPipelineRepresentativeFragmentTestStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineRepresentativeFragmentTestStateCreateInfoNV*>( this );
}
operator VkPipelineRepresentativeFragmentTestStateCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineRepresentativeFragmentTestStateCreateInfoNV*>( this );
}
operator VkPipelineRepresentativeFragmentTestStateCreateInfoNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineRepresentativeFragmentTestStateCreateInfoNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, representativeFragmentTestEnable );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PipelineRepresentativeFragmentTestStateCreateInfoNV const & ) const = default;
#else
bool operator==( PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( representativeFragmentTestEnable == rhs.representativeFragmentTestEnable );
#endif
}
bool operator!=( PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable = {};
};
template <>
struct CppType<StructureType, StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV>
{
using Type = PipelineRepresentativeFragmentTestStateCreateInfoNV;
};
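// Example usage (illustrative sketch; `graphicsPipelineCreateInfo` is a hypothetical
// vk::GraphicsPipelineCreateInfo local):
//   vk::PipelineRepresentativeFragmentTestStateCreateInfoNV representativeTest{};
//   representativeTest.setRepresentativeFragmentTestEnable( VK_TRUE );
//   graphicsPipelineCreateInfo.setPNext( &representativeTest );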
// wrapper struct for struct VkPipelineRobustnessCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineRobustnessCreateInfo.html
struct PipelineRobustnessCreateInfo
{
using NativeType = VkPipelineRobustnessCreateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRobustnessCreateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineRobustnessCreateInfo(VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior storageBuffers_ = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault, VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior uniformBuffers_ = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault, VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior vertexInputs_ = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault, VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehavior images_ = VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehavior::eDeviceDefault, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, storageBuffers{ storageBuffers_ }, uniformBuffers{ uniformBuffers_ }, vertexInputs{ vertexInputs_ }, images{ images_ }
{}
VULKAN_HPP_CONSTEXPR PipelineRobustnessCreateInfo( PipelineRobustnessCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineRobustnessCreateInfo( VkPipelineRobustnessCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineRobustnessCreateInfo( *reinterpret_cast<PipelineRobustnessCreateInfo const *>( &rhs ) )
{}
PipelineRobustnessCreateInfo & operator=( PipelineRobustnessCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineRobustnessCreateInfo & operator=( VkPipelineRobustnessCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRobustnessCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PipelineRobustnessCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineRobustnessCreateInfo & setStorageBuffers( VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior storageBuffers_ ) VULKAN_HPP_NOEXCEPT
{
storageBuffers = storageBuffers_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineRobustnessCreateInfo & setUniformBuffers( VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior uniformBuffers_ ) VULKAN_HPP_NOEXCEPT
{
uniformBuffers = uniformBuffers_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineRobustnessCreateInfo & setVertexInputs( VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior vertexInputs_ ) VULKAN_HPP_NOEXCEPT
{
vertexInputs = vertexInputs_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineRobustnessCreateInfo & setImages( VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehavior images_ ) VULKAN_HPP_NOEXCEPT
{
images = images_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPipelineRobustnessCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineRobustnessCreateInfo*>( this );
}
operator VkPipelineRobustnessCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineRobustnessCreateInfo*>( this );
}
operator VkPipelineRobustnessCreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineRobustnessCreateInfo*>( this );
}
operator VkPipelineRobustnessCreateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineRobustnessCreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior const &, VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior const &, VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior const &, VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehavior const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, storageBuffers, uniformBuffers, vertexInputs, images );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PipelineRobustnessCreateInfo const & ) const = default;
#else
bool operator==( PipelineRobustnessCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( storageBuffers == rhs.storageBuffers )
&& ( uniformBuffers == rhs.uniformBuffers )
&& ( vertexInputs == rhs.vertexInputs )
&& ( images == rhs.images );
#endif
}
bool operator!=( PipelineRobustnessCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRobustnessCreateInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior storageBuffers = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault;
VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior uniformBuffers = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault;
VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior vertexInputs = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault;
VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehavior images = VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehavior::eDeviceDefault;
};
template <>
struct CppType<StructureType, StructureType::ePipelineRobustnessCreateInfo>
{
using Type = PipelineRobustnessCreateInfo;
};
using PipelineRobustnessCreateInfoEXT = PipelineRobustnessCreateInfo;
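// Example usage (illustrative sketch; this struct can be chained into pipeline or shader stage
// create info; `graphicsPipelineCreateInfo` is a hypothetical vk::GraphicsPipelineCreateInfo local):
//   vk::PipelineRobustnessCreateInfo robustness{};
//   robustness.setStorageBuffers( vk::PipelineRobustnessBufferBehavior::eRobustBufferAccess2 )
//             .setImages( vk::PipelineRobustnessImageBehavior::eRobustImageAccess2 );
//   graphicsPipelineCreateInfo.setPNext( &robustness );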
// wrapper struct for struct VkPipelineSampleLocationsStateCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineSampleLocationsStateCreateInfoEXT.html
struct PipelineSampleLocationsStateCreateInfoEXT
{
using NativeType = VkPipelineSampleLocationsStateCreateInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineSampleLocationsStateCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineSampleLocationsStateCreateInfoEXT(VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable_ = {}, VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, sampleLocationsEnable{ sampleLocationsEnable_ }, sampleLocationsInfo{ sampleLocationsInfo_ }
{}
VULKAN_HPP_CONSTEXPR PipelineSampleLocationsStateCreateInfoEXT( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineSampleLocationsStateCreateInfoEXT( VkPipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineSampleLocationsStateCreateInfoEXT( *reinterpret_cast<PipelineSampleLocationsStateCreateInfoEXT const *>( &rhs ) )
{}
PipelineSampleLocationsStateCreateInfoEXT & operator=( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineSampleLocationsStateCreateInfoEXT & operator=( VkPipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineSampleLocationsStateCreateInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PipelineSampleLocationsStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineSampleLocationsStateCreateInfoEXT & setSampleLocationsEnable( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable_ ) VULKAN_HPP_NOEXCEPT
{
sampleLocationsEnable = sampleLocationsEnable_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineSampleLocationsStateCreateInfoEXT & setSampleLocationsInfo( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT
{
sampleLocationsInfo = sampleLocationsInfo_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPipelineSampleLocationsStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineSampleLocationsStateCreateInfoEXT*>( this );
}
operator VkPipelineSampleLocationsStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineSampleLocationsStateCreateInfoEXT*>( this );
}
operator VkPipelineSampleLocationsStateCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineSampleLocationsStateCreateInfoEXT*>( this );
}
operator VkPipelineSampleLocationsStateCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineSampleLocationsStateCreateInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, sampleLocationsEnable, sampleLocationsInfo );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PipelineSampleLocationsStateCreateInfoEXT const & ) const = default;
#else
bool operator==( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( sampleLocationsEnable == rhs.sampleLocationsEnable )
&& ( sampleLocationsInfo == rhs.sampleLocationsInfo );
#endif
}
bool operator!=( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineSampleLocationsStateCreateInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable = {};
VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo = {};
};
template <>
struct CppType<StructureType, StructureType::ePipelineSampleLocationsStateCreateInfoEXT>
{
using Type = PipelineSampleLocationsStateCreateInfoEXT;
};
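// Example usage (illustrative sketch; `sampleLocationsInfo` is a previously filled
// vk::SampleLocationsInfoEXT and `multisampleState` a hypothetical
// vk::PipelineMultisampleStateCreateInfo local):
//   vk::PipelineSampleLocationsStateCreateInfoEXT sampleLocationsState{};
//   sampleLocationsState.setSampleLocationsEnable( VK_TRUE )
//                       .setSampleLocationsInfo( sampleLocationsInfo );
//   multisampleState.setPNext( &sampleLocationsState );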
// wrapper struct for struct VkPipelineShaderStageModuleIdentifierCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineShaderStageModuleIdentifierCreateInfoEXT.html
struct PipelineShaderStageModuleIdentifierCreateInfoEXT
{
using NativeType = VkPipelineShaderStageModuleIdentifierCreateInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineShaderStageModuleIdentifierCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineShaderStageModuleIdentifierCreateInfoEXT(uint32_t identifierSize_ = {}, const uint8_t * pIdentifier_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, identifierSize{ identifierSize_ }, pIdentifier{ pIdentifier_ }
{}
VULKAN_HPP_CONSTEXPR PipelineShaderStageModuleIdentifierCreateInfoEXT( PipelineShaderStageModuleIdentifierCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineShaderStageModuleIdentifierCreateInfoEXT( VkPipelineShaderStageModuleIdentifierCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineShaderStageModuleIdentifierCreateInfoEXT( *reinterpret_cast<PipelineShaderStageModuleIdentifierCreateInfoEXT const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineShaderStageModuleIdentifierCreateInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint8_t> const & identifier_, const void * pNext_ = nullptr )
: pNext( pNext_ ), identifierSize( static_cast<uint32_t>( identifier_.size() ) ), pIdentifier( identifier_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
PipelineShaderStageModuleIdentifierCreateInfoEXT & operator=( PipelineShaderStageModuleIdentifierCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineShaderStageModuleIdentifierCreateInfoEXT & operator=( VkPipelineShaderStageModuleIdentifierCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineShaderStageModuleIdentifierCreateInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageModuleIdentifierCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageModuleIdentifierCreateInfoEXT & setIdentifierSize( uint32_t identifierSize_ ) VULKAN_HPP_NOEXCEPT
{
identifierSize = identifierSize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageModuleIdentifierCreateInfoEXT & setPIdentifier( const uint8_t * pIdentifier_ ) VULKAN_HPP_NOEXCEPT
{
pIdentifier = pIdentifier_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineShaderStageModuleIdentifierCreateInfoEXT & setIdentifier( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint8_t> const & identifier_ ) VULKAN_HPP_NOEXCEPT
{
identifierSize = static_cast<uint32_t>( identifier_.size() );
pIdentifier = identifier_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPipelineShaderStageModuleIdentifierCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineShaderStageModuleIdentifierCreateInfoEXT*>( this );
}
operator VkPipelineShaderStageModuleIdentifierCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineShaderStageModuleIdentifierCreateInfoEXT*>( this );
}
operator VkPipelineShaderStageModuleIdentifierCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineShaderStageModuleIdentifierCreateInfoEXT*>( this );
}
operator VkPipelineShaderStageModuleIdentifierCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineShaderStageModuleIdentifierCreateInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const uint8_t * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, identifierSize, pIdentifier );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PipelineShaderStageModuleIdentifierCreateInfoEXT const & ) const = default;
#else
bool operator==( PipelineShaderStageModuleIdentifierCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( identifierSize == rhs.identifierSize )
&& ( pIdentifier == rhs.pIdentifier );
#endif
}
bool operator!=( PipelineShaderStageModuleIdentifierCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageModuleIdentifierCreateInfoEXT;
const void * pNext = {};
uint32_t identifierSize = {};
const uint8_t * pIdentifier = {};
};
template <>
struct CppType<StructureType, StructureType::ePipelineShaderStageModuleIdentifierCreateInfoEXT>
{
using Type = PipelineShaderStageModuleIdentifierCreateInfoEXT;
};
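// Example usage (illustrative sketch; `identifier` holds bytes assumed to have been queried
// earlier through VK_EXT_shader_module_identifier, and `stageCreateInfo` is a hypothetical
// vk::PipelineShaderStageCreateInfo local):
//   std::vector<uint8_t> identifier = ...;  // previously queried module identifier bytes
//   vk::PipelineShaderStageModuleIdentifierCreateInfoEXT moduleIdentifierInfo( identifier );
//   stageCreateInfo.setPNext( &moduleIdentifierInfo );  // lets the driver match a cached module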
#if defined( VK_ENABLE_BETA_EXTENSIONS )
// wrapper struct for struct VkPipelineShaderStageNodeCreateInfoAMDX, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineShaderStageNodeCreateInfoAMDX.html
struct PipelineShaderStageNodeCreateInfoAMDX
{
using NativeType = VkPipelineShaderStageNodeCreateInfoAMDX;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineShaderStageNodeCreateInfoAMDX;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineShaderStageNodeCreateInfoAMDX(const char * pName_ = {}, uint32_t index_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pName{ pName_ }, index{ index_ }
{}
VULKAN_HPP_CONSTEXPR PipelineShaderStageNodeCreateInfoAMDX( PipelineShaderStageNodeCreateInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineShaderStageNodeCreateInfoAMDX( VkPipelineShaderStageNodeCreateInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineShaderStageNodeCreateInfoAMDX( *reinterpret_cast<PipelineShaderStageNodeCreateInfoAMDX const *>( &rhs ) )
{}
PipelineShaderStageNodeCreateInfoAMDX & operator=( PipelineShaderStageNodeCreateInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineShaderStageNodeCreateInfoAMDX & operator=( VkPipelineShaderStageNodeCreateInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageNodeCreateInfoAMDX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageNodeCreateInfoAMDX & setPName( const char * pName_ ) VULKAN_HPP_NOEXCEPT
{
pName = pName_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageNodeCreateInfoAMDX & setIndex( uint32_t index_ ) VULKAN_HPP_NOEXCEPT
{
index = index_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPipelineShaderStageNodeCreateInfoAMDX const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineShaderStageNodeCreateInfoAMDX*>( this );
}
operator VkPipelineShaderStageNodeCreateInfoAMDX &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineShaderStageNodeCreateInfoAMDX*>( this );
}
operator VkPipelineShaderStageNodeCreateInfoAMDX const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineShaderStageNodeCreateInfoAMDX*>( this );
}
operator VkPipelineShaderStageNodeCreateInfoAMDX *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineShaderStageNodeCreateInfoAMDX*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const char * const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pName, index );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
std::strong_ordering operator<=>( PipelineShaderStageNodeCreateInfoAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
return cmp;
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
return cmp;
// pName is compared by string content, not by pointer identity
if ( pName != rhs.pName )
if ( auto cmp = strcmp( pName, rhs.pName ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
if ( auto cmp = index <=> rhs.index; cmp != 0 )
return cmp;
return std::strong_ordering::equivalent;
}
#endif
bool operator==( PipelineShaderStageNodeCreateInfoAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( ( pName == rhs.pName ) || ( strcmp( pName, rhs.pName ) == 0 ) )
&& ( index == rhs.index );
}
bool operator!=( PipelineShaderStageNodeCreateInfoAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageNodeCreateInfoAMDX;
const void * pNext = {};
const char * pName = {};
uint32_t index = {};
};
template <>
struct CppType<StructureType, StructureType::ePipelineShaderStageNodeCreateInfoAMDX>
{
using Type = PipelineShaderStageNodeCreateInfoAMDX;
};
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
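// Example usage (illustrative sketch for the provisional VK_AMDX_shader_enqueue extension;
// `stageCreateInfo` is a hypothetical vk::PipelineShaderStageCreateInfo local, and the string
// passed to setPName must stay alive for as long as the struct is in use):
//   vk::PipelineShaderStageNodeCreateInfoAMDX nodeInfo{};
//   nodeInfo.setPName( "main" ).setIndex( 0 );
//   stageCreateInfo.setPNext( &nodeInfo );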
// wrapper struct for struct VkPipelineShaderStageRequiredSubgroupSizeCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineShaderStageRequiredSubgroupSizeCreateInfo.html
struct PipelineShaderStageRequiredSubgroupSizeCreateInfo
{
using NativeType = VkPipelineShaderStageRequiredSubgroupSizeCreateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineShaderStageRequiredSubgroupSizeCreateInfo(uint32_t requiredSubgroupSize_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, requiredSubgroupSize{ requiredSubgroupSize_ }
{}
VULKAN_HPP_CONSTEXPR PipelineShaderStageRequiredSubgroupSizeCreateInfo( PipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineShaderStageRequiredSubgroupSizeCreateInfo( VkPipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineShaderStageRequiredSubgroupSizeCreateInfo( *reinterpret_cast<PipelineShaderStageRequiredSubgroupSizeCreateInfo const *>( &rhs ) )
{}
PipelineShaderStageRequiredSubgroupSizeCreateInfo & operator=( PipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineShaderStageRequiredSubgroupSizeCreateInfo & operator=( VkPipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfo const *>( &rhs );
return *this;
}
operator VkPipelineShaderStageRequiredSubgroupSizeCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineShaderStageRequiredSubgroupSizeCreateInfo*>( this );
}
operator VkPipelineShaderStageRequiredSubgroupSizeCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineShaderStageRequiredSubgroupSizeCreateInfo*>( this );
}
operator VkPipelineShaderStageRequiredSubgroupSizeCreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineShaderStageRequiredSubgroupSizeCreateInfo*>( this );
}
operator VkPipelineShaderStageRequiredSubgroupSizeCreateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineShaderStageRequiredSubgroupSizeCreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, requiredSubgroupSize );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PipelineShaderStageRequiredSubgroupSizeCreateInfo const & ) const = default;
#else
bool operator==( PipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( requiredSubgroupSize == rhs.requiredSubgroupSize );
#endif
}
bool operator!=( PipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfo;
void * pNext = {};
uint32_t requiredSubgroupSize = {};
};
template <>
struct CppType<StructureType, StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfo>
{
using Type = PipelineShaderStageRequiredSubgroupSizeCreateInfo;
};
using PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT = PipelineShaderStageRequiredSubgroupSizeCreateInfo;
using ShaderRequiredSubgroupSizeCreateInfoEXT = PipelineShaderStageRequiredSubgroupSizeCreateInfo;
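// Usage sketch (editorial, illustrative only): this struct has no setters in the generated
// API, so it is filled via its constructor and chained into a shader stage create info to pin
// the subgroup size. The value 32 and the outer struct name are hypothetical.
//   VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfo requiredSize{ 32 };
//   shaderStageCreateInfo.setPNext( &requiredSize );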
// wrapper struct for struct VkPipelineTessellationDomainOriginStateCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineTessellationDomainOriginStateCreateInfo.html
struct PipelineTessellationDomainOriginStateCreateInfo
{
using NativeType = VkPipelineTessellationDomainOriginStateCreateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineTessellationDomainOriginStateCreateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineTessellationDomainOriginStateCreateInfo(VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin_ = VULKAN_HPP_NAMESPACE::TessellationDomainOrigin::eUpperLeft, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, domainOrigin{ domainOrigin_ }
{}
VULKAN_HPP_CONSTEXPR PipelineTessellationDomainOriginStateCreateInfo( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineTessellationDomainOriginStateCreateInfo( VkPipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineTessellationDomainOriginStateCreateInfo( *reinterpret_cast<PipelineTessellationDomainOriginStateCreateInfo const *>( &rhs ) )
{}
PipelineTessellationDomainOriginStateCreateInfo & operator=( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineTessellationDomainOriginStateCreateInfo & operator=( VkPipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PipelineTessellationDomainOriginStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineTessellationDomainOriginStateCreateInfo & setDomainOrigin( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin_ ) VULKAN_HPP_NOEXCEPT
{
domainOrigin = domainOrigin_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPipelineTessellationDomainOriginStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineTessellationDomainOriginStateCreateInfo*>( this );
}
operator VkPipelineTessellationDomainOriginStateCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineTessellationDomainOriginStateCreateInfo*>( this );
}
operator VkPipelineTessellationDomainOriginStateCreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineTessellationDomainOriginStateCreateInfo*>( this );
}
operator VkPipelineTessellationDomainOriginStateCreateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineTessellationDomainOriginStateCreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::TessellationDomainOrigin const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, domainOrigin );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PipelineTessellationDomainOriginStateCreateInfo const & ) const = default;
#else
bool operator==( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( domainOrigin == rhs.domainOrigin );
#endif
}
bool operator!=( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineTessellationDomainOriginStateCreateInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin = VULKAN_HPP_NAMESPACE::TessellationDomainOrigin::eUpperLeft;
};
template <>
struct CppType<StructureType, StructureType::ePipelineTessellationDomainOriginStateCreateInfo>
{
using Type = PipelineTessellationDomainOriginStateCreateInfo;
};
using PipelineTessellationDomainOriginStateCreateInfoKHR = PipelineTessellationDomainOriginStateCreateInfo;
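// Usage sketch (editorial, illustrative only): switches the tessellation domain origin to the
// lower-left convention by extending a tessellation state create info; the outer struct name
// is hypothetical.
//   VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo domainOriginInfo{};
//   domainOriginInfo.setDomainOrigin( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin::eLowerLeft );
//   tessellationStateCreateInfo.setPNext( &domainOriginInfo );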
// wrapper struct for struct VkVertexInputBindingDivisorDescription, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVertexInputBindingDivisorDescription.html
struct VertexInputBindingDivisorDescription
{
using NativeType = VkVertexInputBindingDivisorDescription;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VertexInputBindingDivisorDescription(uint32_t binding_ = {}, uint32_t divisor_ = {}) VULKAN_HPP_NOEXCEPT
: binding{ binding_ }, divisor{ divisor_ }
{}
VULKAN_HPP_CONSTEXPR VertexInputBindingDivisorDescription( VertexInputBindingDivisorDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VertexInputBindingDivisorDescription( VkVertexInputBindingDivisorDescription const & rhs ) VULKAN_HPP_NOEXCEPT
: VertexInputBindingDivisorDescription( *reinterpret_cast<VertexInputBindingDivisorDescription const *>( &rhs ) )
{}
VertexInputBindingDivisorDescription & operator=( VertexInputBindingDivisorDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VertexInputBindingDivisorDescription & operator=( VkVertexInputBindingDivisorDescription const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescription const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDivisorDescription & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
{
binding = binding_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDivisorDescription & setDivisor( uint32_t divisor_ ) VULKAN_HPP_NOEXCEPT
{
divisor = divisor_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVertexInputBindingDivisorDescription const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVertexInputBindingDivisorDescription*>( this );
}
operator VkVertexInputBindingDivisorDescription &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVertexInputBindingDivisorDescription*>( this );
}
operator VkVertexInputBindingDivisorDescription const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVertexInputBindingDivisorDescription*>( this );
}
operator VkVertexInputBindingDivisorDescription *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVertexInputBindingDivisorDescription*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( binding, divisor );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VertexInputBindingDivisorDescription const & ) const = default;
#else
bool operator==( VertexInputBindingDivisorDescription const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( binding == rhs.binding )
&& ( divisor == rhs.divisor );
#endif
}
bool operator!=( VertexInputBindingDivisorDescription const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
uint32_t binding = {};
uint32_t divisor = {};
};
using VertexInputBindingDivisorDescriptionEXT = VertexInputBindingDivisorDescription;
using VertexInputBindingDivisorDescriptionKHR = VertexInputBindingDivisorDescription;
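// Usage sketch (editorial, illustrative only): one description per vertex binding whose
// instance rate should differ from the default; a divisor of 4 advances the attribute once
// every four instances on binding 1.
//   VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescription divisor{ /*binding*/ 1, /*divisor*/ 4 };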
// wrapper struct for struct VkPipelineVertexInputDivisorStateCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineVertexInputDivisorStateCreateInfo.html
struct PipelineVertexInputDivisorStateCreateInfo
{
using NativeType = VkPipelineVertexInputDivisorStateCreateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineVertexInputDivisorStateCreateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineVertexInputDivisorStateCreateInfo(uint32_t vertexBindingDivisorCount_ = {}, const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescription * pVertexBindingDivisors_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, vertexBindingDivisorCount{ vertexBindingDivisorCount_ }, pVertexBindingDivisors{ pVertexBindingDivisors_ }
{}
VULKAN_HPP_CONSTEXPR PipelineVertexInputDivisorStateCreateInfo( PipelineVertexInputDivisorStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineVertexInputDivisorStateCreateInfo( VkPipelineVertexInputDivisorStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineVertexInputDivisorStateCreateInfo( *reinterpret_cast<PipelineVertexInputDivisorStateCreateInfo const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineVertexInputDivisorStateCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescription> const & vertexBindingDivisors_, const void * pNext_ = nullptr )
: pNext( pNext_ ), vertexBindingDivisorCount( static_cast<uint32_t>( vertexBindingDivisors_.size() ) ), pVertexBindingDivisors( vertexBindingDivisors_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
PipelineVertexInputDivisorStateCreateInfo & operator=( PipelineVertexInputDivisorStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineVertexInputDivisorStateCreateInfo & operator=( VkPipelineVertexInputDivisorStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputDivisorStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputDivisorStateCreateInfo & setVertexBindingDivisorCount( uint32_t vertexBindingDivisorCount_ ) VULKAN_HPP_NOEXCEPT
{
vertexBindingDivisorCount = vertexBindingDivisorCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputDivisorStateCreateInfo & setPVertexBindingDivisors( const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescription * pVertexBindingDivisors_ ) VULKAN_HPP_NOEXCEPT
{
pVertexBindingDivisors = pVertexBindingDivisors_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineVertexInputDivisorStateCreateInfo & setVertexBindingDivisors( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescription> const & vertexBindingDivisors_ ) VULKAN_HPP_NOEXCEPT
{
vertexBindingDivisorCount = static_cast<uint32_t>( vertexBindingDivisors_.size() );
pVertexBindingDivisors = vertexBindingDivisors_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPipelineVertexInputDivisorStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineVertexInputDivisorStateCreateInfo*>( this );
}
operator VkPipelineVertexInputDivisorStateCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineVertexInputDivisorStateCreateInfo*>( this );
}
operator VkPipelineVertexInputDivisorStateCreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineVertexInputDivisorStateCreateInfo*>( this );
}
operator VkPipelineVertexInputDivisorStateCreateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineVertexInputDivisorStateCreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescription * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, vertexBindingDivisorCount, pVertexBindingDivisors );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PipelineVertexInputDivisorStateCreateInfo const & ) const = default;
#else
bool operator==( PipelineVertexInputDivisorStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( vertexBindingDivisorCount == rhs.vertexBindingDivisorCount )
&& ( pVertexBindingDivisors == rhs.pVertexBindingDivisors );
#endif
}
bool operator!=( PipelineVertexInputDivisorStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineVertexInputDivisorStateCreateInfo;
const void * pNext = {};
uint32_t vertexBindingDivisorCount = {};
const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescription * pVertexBindingDivisors = {};
};
template <>
struct CppType<StructureType, StructureType::ePipelineVertexInputDivisorStateCreateInfo>
{
using Type = PipelineVertexInputDivisorStateCreateInfo;
};
using PipelineVertexInputDivisorStateCreateInfoEXT = PipelineVertexInputDivisorStateCreateInfo;
using PipelineVertexInputDivisorStateCreateInfoKHR = PipelineVertexInputDivisorStateCreateInfo;
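// Usage sketch (editorial, illustrative only): in enhanced mode the ArrayProxy constructor
// deduces vertexBindingDivisorCount / pVertexBindingDivisors from a container (std::array
// assumed here, needs <array>); the struct is then chained into the vertex input state.
//   std::array<VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescription, 1> divisors{ { { 1, 4 } } };
//   VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfo divisorState{ divisors };
//   vertexInputStateCreateInfo.setPNext( &divisorState );  // assumed vk::PipelineVertexInputStateCreateInfo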
// wrapper struct for struct VkPipelineViewportCoarseSampleOrderStateCreateInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineViewportCoarseSampleOrderStateCreateInfoNV.html
struct PipelineViewportCoarseSampleOrderStateCreateInfoNV
{
using NativeType = VkPipelineViewportCoarseSampleOrderStateCreateInfoNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineViewportCoarseSampleOrderStateCreateInfoNV(VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_ = VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV::eDefault, uint32_t customSampleOrderCount_ = {}, const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, sampleOrderType{ sampleOrderType_ }, customSampleOrderCount{ customSampleOrderCount_ }, pCustomSampleOrders{ pCustomSampleOrders_ }
{}
VULKAN_HPP_CONSTEXPR PipelineViewportCoarseSampleOrderStateCreateInfoNV( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineViewportCoarseSampleOrderStateCreateInfoNV( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineViewportCoarseSampleOrderStateCreateInfoNV( *reinterpret_cast<PipelineViewportCoarseSampleOrderStateCreateInfoNV const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineViewportCoarseSampleOrderStateCreateInfoNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> const & customSampleOrders_, const void * pNext_ = nullptr )
: pNext( pNext_ ), sampleOrderType( sampleOrderType_ ), customSampleOrderCount( static_cast<uint32_t>( customSampleOrders_.size() ) ), pCustomSampleOrders( customSampleOrders_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
PipelineViewportCoarseSampleOrderStateCreateInfoNV & operator=( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineViewportCoarseSampleOrderStateCreateInfoNV & operator=( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportCoarseSampleOrderStateCreateInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PipelineViewportCoarseSampleOrderStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineViewportCoarseSampleOrderStateCreateInfoNV & setSampleOrderType( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_ ) VULKAN_HPP_NOEXCEPT
{
sampleOrderType = sampleOrderType_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineViewportCoarseSampleOrderStateCreateInfoNV & setCustomSampleOrderCount( uint32_t customSampleOrderCount_ ) VULKAN_HPP_NOEXCEPT
{
customSampleOrderCount = customSampleOrderCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineViewportCoarseSampleOrderStateCreateInfoNV & setPCustomSampleOrders( const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders_ ) VULKAN_HPP_NOEXCEPT
{
pCustomSampleOrders = pCustomSampleOrders_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineViewportCoarseSampleOrderStateCreateInfoNV & setCustomSampleOrders( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> const & customSampleOrders_ ) VULKAN_HPP_NOEXCEPT
{
customSampleOrderCount = static_cast<uint32_t>( customSampleOrders_.size() );
pCustomSampleOrders = customSampleOrders_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineViewportCoarseSampleOrderStateCreateInfoNV*>( this );
}
operator VkPipelineViewportCoarseSampleOrderStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineViewportCoarseSampleOrderStateCreateInfoNV*>( this );
}
operator VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineViewportCoarseSampleOrderStateCreateInfoNV*>( this );
}
operator VkPipelineViewportCoarseSampleOrderStateCreateInfoNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineViewportCoarseSampleOrderStateCreateInfoNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, sampleOrderType, customSampleOrderCount, pCustomSampleOrders );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & ) const = default;
#else
bool operator==( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( sampleOrderType == rhs.sampleOrderType )
&& ( customSampleOrderCount == rhs.customSampleOrderCount )
&& ( pCustomSampleOrders == rhs.pCustomSampleOrders );
#endif
}
bool operator!=( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType = VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV::eDefault;
uint32_t customSampleOrderCount = {};
const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders = {};
};
template <>
struct CppType<StructureType, StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV>
{
using Type = PipelineViewportCoarseSampleOrderStateCreateInfoNV;
};
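// Usage sketch (editorial, illustrative only): selects a standard pixel-major sample order
// instead of supplying custom orders, so the count/pointer pair stays empty; the outer struct
// name is hypothetical.
//   VULKAN_HPP_NAMESPACE::PipelineViewportCoarseSampleOrderStateCreateInfoNV sampleOrderInfo{};
//   sampleOrderInfo.setSampleOrderType( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV::ePixelMajor );
//   viewportStateCreateInfo.setPNext( &sampleOrderInfo );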
// wrapper struct for struct VkPipelineViewportDepthClampControlCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineViewportDepthClampControlCreateInfoEXT.html
struct PipelineViewportDepthClampControlCreateInfoEXT
{
using NativeType = VkPipelineViewportDepthClampControlCreateInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportDepthClampControlCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineViewportDepthClampControlCreateInfoEXT(VULKAN_HPP_NAMESPACE::DepthClampModeEXT depthClampMode_ = VULKAN_HPP_NAMESPACE::DepthClampModeEXT::eViewportRange, const VULKAN_HPP_NAMESPACE::DepthClampRangeEXT * pDepthClampRange_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, depthClampMode{ depthClampMode_ }, pDepthClampRange{ pDepthClampRange_ }
{}
VULKAN_HPP_CONSTEXPR PipelineViewportDepthClampControlCreateInfoEXT( PipelineViewportDepthClampControlCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineViewportDepthClampControlCreateInfoEXT( VkPipelineViewportDepthClampControlCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineViewportDepthClampControlCreateInfoEXT( *reinterpret_cast<PipelineViewportDepthClampControlCreateInfoEXT const *>( &rhs ) )
{}
PipelineViewportDepthClampControlCreateInfoEXT & operator=( PipelineViewportDepthClampControlCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineViewportDepthClampControlCreateInfoEXT & operator=( VkPipelineViewportDepthClampControlCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportDepthClampControlCreateInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PipelineViewportDepthClampControlCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineViewportDepthClampControlCreateInfoEXT & setDepthClampMode( VULKAN_HPP_NAMESPACE::DepthClampModeEXT depthClampMode_ ) VULKAN_HPP_NOEXCEPT
{
depthClampMode = depthClampMode_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineViewportDepthClampControlCreateInfoEXT & setPDepthClampRange( const VULKAN_HPP_NAMESPACE::DepthClampRangeEXT * pDepthClampRange_ ) VULKAN_HPP_NOEXCEPT
{
pDepthClampRange = pDepthClampRange_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPipelineViewportDepthClampControlCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineViewportDepthClampControlCreateInfoEXT*>( this );
}
operator VkPipelineViewportDepthClampControlCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineViewportDepthClampControlCreateInfoEXT*>( this );
}
operator VkPipelineViewportDepthClampControlCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineViewportDepthClampControlCreateInfoEXT*>( this );
}
operator VkPipelineViewportDepthClampControlCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineViewportDepthClampControlCreateInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DepthClampModeEXT const &, const VULKAN_HPP_NAMESPACE::DepthClampRangeEXT * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, depthClampMode, pDepthClampRange );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PipelineViewportDepthClampControlCreateInfoEXT const & ) const = default;
#else
bool operator==( PipelineViewportDepthClampControlCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( depthClampMode == rhs.depthClampMode )
&& ( pDepthClampRange == rhs.pDepthClampRange );
#endif
}
bool operator!=( PipelineViewportDepthClampControlCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportDepthClampControlCreateInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DepthClampModeEXT depthClampMode = VULKAN_HPP_NAMESPACE::DepthClampModeEXT::eViewportRange;
const VULKAN_HPP_NAMESPACE::DepthClampRangeEXT * pDepthClampRange = {};
};
template <>
struct CppType<StructureType, StructureType::ePipelineViewportDepthClampControlCreateInfoEXT>
{
using Type = PipelineViewportDepthClampControlCreateInfoEXT;
};
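// Usage sketch (editorial, illustrative only): clamps depth to a user-defined range; the
// pointed-to DepthClampRangeEXT must stay alive until pipeline creation. The member layout of
// DepthClampRangeEXT (minDepthClamp, maxDepthClamp) is assumed here.
//   VULKAN_HPP_NAMESPACE::DepthClampRangeEXT clampRange{ 0.0f, 0.5f };
//   VULKAN_HPP_NAMESPACE::PipelineViewportDepthClampControlCreateInfoEXT clampControl{};
//   clampControl.setDepthClampMode( VULKAN_HPP_NAMESPACE::DepthClampModeEXT::eUserDefinedRange )
//               .setPDepthClampRange( &clampRange );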
// wrapper struct for struct VkPipelineViewportDepthClipControlCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineViewportDepthClipControlCreateInfoEXT.html
struct PipelineViewportDepthClipControlCreateInfoEXT
{
using NativeType = VkPipelineViewportDepthClipControlCreateInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportDepthClipControlCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineViewportDepthClipControlCreateInfoEXT(VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, negativeOneToOne{ negativeOneToOne_ }
{}
VULKAN_HPP_CONSTEXPR PipelineViewportDepthClipControlCreateInfoEXT( PipelineViewportDepthClipControlCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineViewportDepthClipControlCreateInfoEXT( VkPipelineViewportDepthClipControlCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineViewportDepthClipControlCreateInfoEXT( *reinterpret_cast<PipelineViewportDepthClipControlCreateInfoEXT const *>( &rhs ) )
{}
PipelineViewportDepthClipControlCreateInfoEXT & operator=( PipelineViewportDepthClipControlCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineViewportDepthClipControlCreateInfoEXT & operator=( VkPipelineViewportDepthClipControlCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportDepthClipControlCreateInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PipelineViewportDepthClipControlCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineViewportDepthClipControlCreateInfoEXT & setNegativeOneToOne( VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne_ ) VULKAN_HPP_NOEXCEPT
{
negativeOneToOne = negativeOneToOne_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPipelineViewportDepthClipControlCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineViewportDepthClipControlCreateInfoEXT*>( this );
}
operator VkPipelineViewportDepthClipControlCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineViewportDepthClipControlCreateInfoEXT*>( this );
}
operator VkPipelineViewportDepthClipControlCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineViewportDepthClipControlCreateInfoEXT*>( this );
}
operator VkPipelineViewportDepthClipControlCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineViewportDepthClipControlCreateInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, negativeOneToOne );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PipelineViewportDepthClipControlCreateInfoEXT const & ) const = default;
#else
bool operator==( PipelineViewportDepthClipControlCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( negativeOneToOne == rhs.negativeOneToOne );
#endif
}
bool operator!=( PipelineViewportDepthClipControlCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportDepthClipControlCreateInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne = {};
};
template <>
struct CppType<StructureType, StructureType::ePipelineViewportDepthClipControlCreateInfoEXT>
{
using Type = PipelineViewportDepthClipControlCreateInfoEXT;
};
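// Usage sketch (editorial, illustrative only): enabling negativeOneToOne requests the
// OpenGL-style [-1, 1] clip-space depth range for the pipeline's viewports; the outer struct
// name is hypothetical.
//   VULKAN_HPP_NAMESPACE::PipelineViewportDepthClipControlCreateInfoEXT clipControl{ VK_TRUE };
//   viewportStateCreateInfo.setPNext( &clipControl );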
// wrapper struct for struct VkPipelineViewportExclusiveScissorStateCreateInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineViewportExclusiveScissorStateCreateInfoNV.html
struct PipelineViewportExclusiveScissorStateCreateInfoNV
{
using NativeType = VkPipelineViewportExclusiveScissorStateCreateInfoNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineViewportExclusiveScissorStateCreateInfoNV(uint32_t exclusiveScissorCount_ = {}, const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, exclusiveScissorCount{ exclusiveScissorCount_ }, pExclusiveScissors{ pExclusiveScissors_ }
{}
VULKAN_HPP_CONSTEXPR PipelineViewportExclusiveScissorStateCreateInfoNV( PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineViewportExclusiveScissorStateCreateInfoNV( VkPipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineViewportExclusiveScissorStateCreateInfoNV( *reinterpret_cast<PipelineViewportExclusiveScissorStateCreateInfoNV const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineViewportExclusiveScissorStateCreateInfoNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors_, const void * pNext_ = nullptr )
: pNext( pNext_ ), exclusiveScissorCount( static_cast<uint32_t>( exclusiveScissors_.size() ) ), pExclusiveScissors( exclusiveScissors_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
PipelineViewportExclusiveScissorStateCreateInfoNV & operator=( PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineViewportExclusiveScissorStateCreateInfoNV & operator=( VkPipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportExclusiveScissorStateCreateInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PipelineViewportExclusiveScissorStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineViewportExclusiveScissorStateCreateInfoNV & setExclusiveScissorCount( uint32_t exclusiveScissorCount_ ) VULKAN_HPP_NOEXCEPT
{
exclusiveScissorCount = exclusiveScissorCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineViewportExclusiveScissorStateCreateInfoNV & setPExclusiveScissors( const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors_ ) VULKAN_HPP_NOEXCEPT
{
pExclusiveScissors = pExclusiveScissors_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineViewportExclusiveScissorStateCreateInfoNV & setExclusiveScissors( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors_ ) VULKAN_HPP_NOEXCEPT
{
exclusiveScissorCount = static_cast<uint32_t>( exclusiveScissors_.size() );
pExclusiveScissors = exclusiveScissors_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPipelineViewportExclusiveScissorStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineViewportExclusiveScissorStateCreateInfoNV*>( this );
}
operator VkPipelineViewportExclusiveScissorStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineViewportExclusiveScissorStateCreateInfoNV*>( this );
}
operator VkPipelineViewportExclusiveScissorStateCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineViewportExclusiveScissorStateCreateInfoNV*>( this );
}
operator VkPipelineViewportExclusiveScissorStateCreateInfoNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineViewportExclusiveScissorStateCreateInfoNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Rect2D * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, exclusiveScissorCount, pExclusiveScissors );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PipelineViewportExclusiveScissorStateCreateInfoNV const & ) const = default;
#else
bool operator==( PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( exclusiveScissorCount == rhs.exclusiveScissorCount )
&& ( pExclusiveScissors == rhs.pExclusiveScissors );
#endif
}
bool operator!=( PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV;
const void * pNext = {};
uint32_t exclusiveScissorCount = {};
const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors = {};
};
template <>
struct CppType<StructureType, StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV>
{
using Type = PipelineViewportExclusiveScissorStateCreateInfoNV;
};
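// Usage sketch (editorial, illustrative only): one exclusive scissor per viewport; the
// enhanced-mode constructor deduces exclusiveScissorCount from the container (std::array
// assumed here, needs <array>).
//   std::array<VULKAN_HPP_NAMESPACE::Rect2D, 1> scissors{ { { { 0, 0 }, { 256, 256 } } } };
//   VULKAN_HPP_NAMESPACE::PipelineViewportExclusiveScissorStateCreateInfoNV exclusiveScissorInfo{ scissors };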
// wrapper struct for struct VkShadingRatePaletteNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkShadingRatePaletteNV.html
struct ShadingRatePaletteNV
{
using NativeType = VkShadingRatePaletteNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ShadingRatePaletteNV(uint32_t shadingRatePaletteEntryCount_ = {}, const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV * pShadingRatePaletteEntries_ = {}) VULKAN_HPP_NOEXCEPT
: shadingRatePaletteEntryCount{ shadingRatePaletteEntryCount_ }, pShadingRatePaletteEntries{ pShadingRatePaletteEntries_ }
{}
VULKAN_HPP_CONSTEXPR ShadingRatePaletteNV( ShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ShadingRatePaletteNV( VkShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT
: ShadingRatePaletteNV( *reinterpret_cast<ShadingRatePaletteNV const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
ShadingRatePaletteNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV> const & shadingRatePaletteEntries_ )
: shadingRatePaletteEntryCount( static_cast<uint32_t>( shadingRatePaletteEntries_.size() ) ), pShadingRatePaletteEntries( shadingRatePaletteEntries_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
ShadingRatePaletteNV & operator=( ShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ShadingRatePaletteNV & operator=( VkShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ShadingRatePaletteNV & setShadingRatePaletteEntryCount( uint32_t shadingRatePaletteEntryCount_ ) VULKAN_HPP_NOEXCEPT
{
shadingRatePaletteEntryCount = shadingRatePaletteEntryCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ShadingRatePaletteNV & setPShadingRatePaletteEntries( const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV * pShadingRatePaletteEntries_ ) VULKAN_HPP_NOEXCEPT
{
pShadingRatePaletteEntries = pShadingRatePaletteEntries_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
ShadingRatePaletteNV & setShadingRatePaletteEntries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV> const & shadingRatePaletteEntries_ ) VULKAN_HPP_NOEXCEPT
{
shadingRatePaletteEntryCount = static_cast<uint32_t>( shadingRatePaletteEntries_.size() );
pShadingRatePaletteEntries = shadingRatePaletteEntries_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkShadingRatePaletteNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkShadingRatePaletteNV*>( this );
}
operator VkShadingRatePaletteNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkShadingRatePaletteNV*>( this );
}
operator VkShadingRatePaletteNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkShadingRatePaletteNV*>( this );
}
operator VkShadingRatePaletteNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkShadingRatePaletteNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<uint32_t const &, const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( shadingRatePaletteEntryCount, pShadingRatePaletteEntries );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ShadingRatePaletteNV const & ) const = default;
#else
bool operator==( ShadingRatePaletteNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( shadingRatePaletteEntryCount == rhs.shadingRatePaletteEntryCount )
&& ( pShadingRatePaletteEntries == rhs.pShadingRatePaletteEntries );
#endif
}
bool operator!=( ShadingRatePaletteNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
uint32_t shadingRatePaletteEntryCount = {};
const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV * pShadingRatePaletteEntries = {};
};
// wrapper struct for struct VkPipelineViewportShadingRateImageStateCreateInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineViewportShadingRateImageStateCreateInfoNV.html
struct PipelineViewportShadingRateImageStateCreateInfoNV
{
using NativeType = VkPipelineViewportShadingRateImageStateCreateInfoNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineViewportShadingRateImageStateCreateInfoNV(VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_ = {}, uint32_t viewportCount_ = {}, const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, shadingRateImageEnable{ shadingRateImageEnable_ }, viewportCount{ viewportCount_ }, pShadingRatePalettes{ pShadingRatePalettes_ }
{}
VULKAN_HPP_CONSTEXPR PipelineViewportShadingRateImageStateCreateInfoNV( PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineViewportShadingRateImageStateCreateInfoNV( VkPipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineViewportShadingRateImageStateCreateInfoNV( *reinterpret_cast<PipelineViewportShadingRateImageStateCreateInfoNV const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineViewportShadingRateImageStateCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const & shadingRatePalettes_, const void * pNext_ = nullptr )
: pNext( pNext_ ), shadingRateImageEnable( shadingRateImageEnable_ ), viewportCount( static_cast<uint32_t>( shadingRatePalettes_.size() ) ), pShadingRatePalettes( shadingRatePalettes_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
PipelineViewportShadingRateImageStateCreateInfoNV & operator=( PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineViewportShadingRateImageStateCreateInfoNV & operator=( VkPipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportShadingRateImageStateCreateInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PipelineViewportShadingRateImageStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineViewportShadingRateImageStateCreateInfoNV & setShadingRateImageEnable( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_ ) VULKAN_HPP_NOEXCEPT
{
shadingRateImageEnable = shadingRateImageEnable_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineViewportShadingRateImageStateCreateInfoNV & setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT
{
viewportCount = viewportCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineViewportShadingRateImageStateCreateInfoNV & setPShadingRatePalettes( const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes_ ) VULKAN_HPP_NOEXCEPT
{
pShadingRatePalettes = pShadingRatePalettes_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineViewportShadingRateImageStateCreateInfoNV & setShadingRatePalettes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const & shadingRatePalettes_ ) VULKAN_HPP_NOEXCEPT
{
viewportCount = static_cast<uint32_t>( shadingRatePalettes_.size() );
pShadingRatePalettes = shadingRatePalettes_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPipelineViewportShadingRateImageStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineViewportShadingRateImageStateCreateInfoNV*>( this );
}
operator VkPipelineViewportShadingRateImageStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineViewportShadingRateImageStateCreateInfoNV*>( this );
}
operator VkPipelineViewportShadingRateImageStateCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineViewportShadingRateImageStateCreateInfoNV*>( this );
}
operator VkPipelineViewportShadingRateImageStateCreateInfoNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineViewportShadingRateImageStateCreateInfoNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, shadingRateImageEnable, viewportCount, pShadingRatePalettes );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PipelineViewportShadingRateImageStateCreateInfoNV const & ) const = default;
#else
bool operator==( PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( shadingRateImageEnable == rhs.shadingRateImageEnable )
&& ( viewportCount == rhs.viewportCount )
&& ( pShadingRatePalettes == rhs.pShadingRatePalettes );
#endif
}
bool operator!=( PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable = {};
uint32_t viewportCount = {};
const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes = {};
};
template <>
struct CppType<StructureType, StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV>
{
using Type = PipelineViewportShadingRateImageStateCreateInfoNV;
};
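// Usage sketch (editorial, illustrative only): a ShadingRatePaletteNV lists per-viewport
// palette entries, and the enhanced-mode constructor derives viewportCount from the number of
// palettes passed (a single palette here; std::array assumed, needs <array>).
//   std::array<VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV, 2> entries{
//     VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV::eNoInvocations,
//     VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV::e1InvocationPerPixel };
//   VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV palette{ entries };
//   VULKAN_HPP_NAMESPACE::PipelineViewportShadingRateImageStateCreateInfoNV shadingRateInfo{ VK_TRUE, palette };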
// wrapper struct for struct VkViewportSwizzleNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkViewportSwizzleNV.html
struct ViewportSwizzleNV
{
using NativeType = VkViewportSwizzleNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ViewportSwizzleNV(VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX, VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX, VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX, VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX) VULKAN_HPP_NOEXCEPT
: x{ x_ }, y{ y_ }, z{ z_ }, w{ w_ }
{}
VULKAN_HPP_CONSTEXPR ViewportSwizzleNV( ViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ViewportSwizzleNV( VkViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT
: ViewportSwizzleNV( *reinterpret_cast<ViewportSwizzleNV const *>( &rhs ) )
{}
ViewportSwizzleNV & operator=( ViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ViewportSwizzleNV & operator=( VkViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ViewportSwizzleNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ViewportSwizzleNV & setX( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x_ ) VULKAN_HPP_NOEXCEPT
{
x = x_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ViewportSwizzleNV & setY( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y_ ) VULKAN_HPP_NOEXCEPT
{
y = y_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ViewportSwizzleNV & setZ( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z_ ) VULKAN_HPP_NOEXCEPT
{
z = z_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ViewportSwizzleNV & setW( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w_ ) VULKAN_HPP_NOEXCEPT
{
w = w_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkViewportSwizzleNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkViewportSwizzleNV*>( this );
}
operator VkViewportSwizzleNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkViewportSwizzleNV*>( this );
}
operator VkViewportSwizzleNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkViewportSwizzleNV*>( this );
}
operator VkViewportSwizzleNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkViewportSwizzleNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV const &, VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV const &, VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV const &, VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( x, y, z, w );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ViewportSwizzleNV const & ) const = default;
#else
bool operator==( ViewportSwizzleNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( x == rhs.x )
&& ( y == rhs.y )
&& ( z == rhs.z )
&& ( w == rhs.w );
#endif
}
bool operator!=( ViewportSwizzleNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX;
VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX;
VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX;
VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX;
};
// wrapper struct for struct VkPipelineViewportSwizzleStateCreateInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineViewportSwizzleStateCreateInfoNV.html
struct PipelineViewportSwizzleStateCreateInfoNV
{
using NativeType = VkPipelineViewportSwizzleStateCreateInfoNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportSwizzleStateCreateInfoNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineViewportSwizzleStateCreateInfoNV(VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_ = {}, uint32_t viewportCount_ = {}, const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, viewportCount{ viewportCount_ }, pViewportSwizzles{ pViewportSwizzles_ }
{}
VULKAN_HPP_CONSTEXPR PipelineViewportSwizzleStateCreateInfoNV( PipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineViewportSwizzleStateCreateInfoNV( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineViewportSwizzleStateCreateInfoNV( *reinterpret_cast<PipelineViewportSwizzleStateCreateInfoNV const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineViewportSwizzleStateCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV> const & viewportSwizzles_, const void * pNext_ = nullptr )
: pNext( pNext_ ), flags( flags_ ), viewportCount( static_cast<uint32_t>( viewportSwizzles_.size() ) ), pViewportSwizzles( viewportSwizzles_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
PipelineViewportSwizzleStateCreateInfoNV & operator=( PipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineViewportSwizzleStateCreateInfoNV & operator=( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PipelineViewportSwizzleStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineViewportSwizzleStateCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineViewportSwizzleStateCreateInfoNV & setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT
{
viewportCount = viewportCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineViewportSwizzleStateCreateInfoNV & setPViewportSwizzles( const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles_ ) VULKAN_HPP_NOEXCEPT
{
pViewportSwizzles = pViewportSwizzles_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineViewportSwizzleStateCreateInfoNV & setViewportSwizzles( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV> const & viewportSwizzles_ ) VULKAN_HPP_NOEXCEPT
{
viewportCount = static_cast<uint32_t>( viewportSwizzles_.size() );
pViewportSwizzles = viewportSwizzles_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPipelineViewportSwizzleStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineViewportSwizzleStateCreateInfoNV*>( this );
}
operator VkPipelineViewportSwizzleStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineViewportSwizzleStateCreateInfoNV*>( this );
}
operator VkPipelineViewportSwizzleStateCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineViewportSwizzleStateCreateInfoNV*>( this );
}
operator VkPipelineViewportSwizzleStateCreateInfoNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineViewportSwizzleStateCreateInfoNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, viewportCount, pViewportSwizzles );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PipelineViewportSwizzleStateCreateInfoNV const & ) const = default;
#else
bool operator==( PipelineViewportSwizzleStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( viewportCount == rhs.viewportCount )
&& ( pViewportSwizzles == rhs.pViewportSwizzles );
#endif
}
bool operator!=( PipelineViewportSwizzleStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportSwizzleStateCreateInfoNV;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags = {};
uint32_t viewportCount = {};
const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles = {};
};
template <>
struct CppType<StructureType, StructureType::ePipelineViewportSwizzleStateCreateInfoNV>
{
using Type = PipelineViewportSwizzleStateCreateInfoNV;
};
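  // Usage sketch (not part of the generated header): chaining a viewport
  // swizzle state into a PipelineViewportStateCreateInfo via pNext. All values
  // are illustrative placeholders, `vk` assumes the default
  // VULKAN_HPP_NAMESPACE, and the VK_NV_viewport_swizzle extension is assumed
  // to be enabled on the device.
  //
  //   std::array<vk::ViewportSwizzleNV, 1> swizzles = { vk::ViewportSwizzleNV{} };
  //   vk::PipelineViewportSwizzleStateCreateInfoNV swizzleState{};
  //   swizzleState.setViewportSwizzles( swizzles );  // sets viewportCount and pViewportSwizzles together
  //   vk::PipelineViewportStateCreateInfo viewportState{};
  //   viewportState.setPNext( &swizzleState );       // swizzle viewportCount must match the viewport state's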
// wrapper struct for struct VkViewportWScalingNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkViewportWScalingNV.html
struct ViewportWScalingNV
{
using NativeType = VkViewportWScalingNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ViewportWScalingNV(float xcoeff_ = {}, float ycoeff_ = {}) VULKAN_HPP_NOEXCEPT
: xcoeff{ xcoeff_ }, ycoeff{ ycoeff_ }
{}
VULKAN_HPP_CONSTEXPR ViewportWScalingNV( ViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ViewportWScalingNV( VkViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT
: ViewportWScalingNV( *reinterpret_cast<ViewportWScalingNV const *>( &rhs ) )
{}
ViewportWScalingNV & operator=( ViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ViewportWScalingNV & operator=( VkViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ViewportWScalingNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ViewportWScalingNV & setXcoeff( float xcoeff_ ) VULKAN_HPP_NOEXCEPT
{
xcoeff = xcoeff_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ViewportWScalingNV & setYcoeff( float ycoeff_ ) VULKAN_HPP_NOEXCEPT
{
ycoeff = ycoeff_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkViewportWScalingNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkViewportWScalingNV*>( this );
}
operator VkViewportWScalingNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkViewportWScalingNV*>( this );
}
operator VkViewportWScalingNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkViewportWScalingNV*>( this );
}
operator VkViewportWScalingNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkViewportWScalingNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<float const &, float const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( xcoeff, ycoeff );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ViewportWScalingNV const & ) const = default;
#else
bool operator==( ViewportWScalingNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( xcoeff == rhs.xcoeff )
&& ( ycoeff == rhs.ycoeff );
#endif
}
bool operator!=( ViewportWScalingNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
float xcoeff = {};
float ycoeff = {};
};
// wrapper struct for struct VkPipelineViewportWScalingStateCreateInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineViewportWScalingStateCreateInfoNV.html
struct PipelineViewportWScalingStateCreateInfoNV
{
using NativeType = VkPipelineViewportWScalingStateCreateInfoNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportWScalingStateCreateInfoNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineViewportWScalingStateCreateInfoNV(VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_ = {}, uint32_t viewportCount_ = {}, const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, viewportWScalingEnable{ viewportWScalingEnable_ }, viewportCount{ viewportCount_ }, pViewportWScalings{ pViewportWScalings_ }
{}
VULKAN_HPP_CONSTEXPR PipelineViewportWScalingStateCreateInfoNV( PipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineViewportWScalingStateCreateInfoNV( VkPipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineViewportWScalingStateCreateInfoNV( *reinterpret_cast<PipelineViewportWScalingStateCreateInfoNV const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineViewportWScalingStateCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const & viewportWScalings_, const void * pNext_ = nullptr )
: pNext( pNext_ ), viewportWScalingEnable( viewportWScalingEnable_ ), viewportCount( static_cast<uint32_t>( viewportWScalings_.size() ) ), pViewportWScalings( viewportWScalings_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
PipelineViewportWScalingStateCreateInfoNV & operator=( PipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PipelineViewportWScalingStateCreateInfoNV & operator=( VkPipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportWScalingStateCreateInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PipelineViewportWScalingStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineViewportWScalingStateCreateInfoNV & setViewportWScalingEnable( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_ ) VULKAN_HPP_NOEXCEPT
{
viewportWScalingEnable = viewportWScalingEnable_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineViewportWScalingStateCreateInfoNV & setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT
{
viewportCount = viewportCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineViewportWScalingStateCreateInfoNV & setPViewportWScalings( const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings_ ) VULKAN_HPP_NOEXCEPT
{
pViewportWScalings = pViewportWScalings_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineViewportWScalingStateCreateInfoNV & setViewportWScalings( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const & viewportWScalings_ ) VULKAN_HPP_NOEXCEPT
{
viewportCount = static_cast<uint32_t>( viewportWScalings_.size() );
pViewportWScalings = viewportWScalings_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPipelineViewportWScalingStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineViewportWScalingStateCreateInfoNV*>( this );
}
operator VkPipelineViewportWScalingStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineViewportWScalingStateCreateInfoNV*>( this );
}
operator VkPipelineViewportWScalingStateCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPipelineViewportWScalingStateCreateInfoNV*>( this );
}
operator VkPipelineViewportWScalingStateCreateInfoNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPipelineViewportWScalingStateCreateInfoNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, viewportWScalingEnable, viewportCount, pViewportWScalings );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PipelineViewportWScalingStateCreateInfoNV const & ) const = default;
#else
bool operator==( PipelineViewportWScalingStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( viewportWScalingEnable == rhs.viewportWScalingEnable )
&& ( viewportCount == rhs.viewportCount )
&& ( pViewportWScalings == rhs.pViewportWScalings );
#endif
}
bool operator!=( PipelineViewportWScalingStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportWScalingStateCreateInfoNV;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable = {};
uint32_t viewportCount = {};
const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings = {};
};
template <>
struct CppType<StructureType, StructureType::ePipelineViewportWScalingStateCreateInfoNV>
{
using Type = PipelineViewportWScalingStateCreateInfoNV;
};
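  // Usage sketch (illustrative only): per-viewport W scaling is expressed as
  // one ViewportWScalingNV coefficient pair per viewport and attached to the
  // viewport state via pNext. Assumes VK_NV_clip_space_w_scaling is enabled;
  // `vk` is the default VULKAN_HPP_NAMESPACE; the coefficients are placeholders.
  //
  //   std::array<vk::ViewportWScalingNV, 1> wScalings = { vk::ViewportWScalingNV( 1.0f, 1.0f ) };
  //   vk::PipelineViewportWScalingStateCreateInfoNV wScalingState( VK_TRUE, wScalings );
  //   vk::PipelineViewportStateCreateInfo viewportState{};
  //   viewportState.setPNext( &wScalingState );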
#if defined( VK_USE_PLATFORM_GGP )
// wrapper struct for struct VkPresentFrameTokenGGP, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPresentFrameTokenGGP.html
struct PresentFrameTokenGGP
{
using NativeType = VkPresentFrameTokenGGP;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentFrameTokenGGP;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PresentFrameTokenGGP(GgpFrameToken frameToken_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, frameToken{ frameToken_ }
{}
VULKAN_HPP_CONSTEXPR PresentFrameTokenGGP( PresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PresentFrameTokenGGP( VkPresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT
: PresentFrameTokenGGP( *reinterpret_cast<PresentFrameTokenGGP const *>( &rhs ) )
{}
PresentFrameTokenGGP & operator=( PresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PresentFrameTokenGGP & operator=( VkPresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PresentFrameTokenGGP & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PresentFrameTokenGGP & setFrameToken( GgpFrameToken frameToken_ ) VULKAN_HPP_NOEXCEPT
{
frameToken = frameToken_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPresentFrameTokenGGP const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPresentFrameTokenGGP*>( this );
}
operator VkPresentFrameTokenGGP &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPresentFrameTokenGGP*>( this );
}
operator VkPresentFrameTokenGGP const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPresentFrameTokenGGP*>( this );
}
operator VkPresentFrameTokenGGP *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPresentFrameTokenGGP*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, GgpFrameToken const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, frameToken );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
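    // GgpFrameToken is an opaque platform type, so the defaulted spaceship
    // operator cannot be used here; ordering falls back to a memcmp over the
    // token's bytes, mirroring operator== below.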
std::strong_ordering operator<=>( PresentFrameTokenGGP const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
if ( auto cmp = memcmp( &frameToken, &rhs.frameToken, sizeof( GgpFrameToken ) ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
return std::strong_ordering::equivalent;
}
#endif
bool operator==( PresentFrameTokenGGP const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( memcmp( &frameToken, &rhs.frameToken, sizeof( GgpFrameToken ) ) == 0 );
}
bool operator!=( PresentFrameTokenGGP const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentFrameTokenGGP;
const void * pNext = {};
GgpFrameToken frameToken = {};
};
template <>
struct CppType<StructureType, StructureType::ePresentFrameTokenGGP>
{
using Type = PresentFrameTokenGGP;
};
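  // Usage sketch (illustrative, GGP platforms only): the frame token itself is
  // supplied by the Google Games Platform; the value-initialized token below is
  // only a placeholder, and `vk` assumes the default VULKAN_HPP_NAMESPACE.
  //
  //   GgpFrameToken token = {};                 // placeholder; a real token comes from the platform
  //   vk::PresentFrameTokenGGP frameToken( token );
  //   vk::PresentInfoKHR presentInfo{};
  //   presentInfo.setPNext( &frameToken );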
#endif /*VK_USE_PLATFORM_GGP*/
// wrapper struct for struct VkPresentId2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPresentId2KHR.html
struct PresentId2KHR
{
using NativeType = VkPresentId2KHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentId2KHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PresentId2KHR(uint32_t swapchainCount_ = {}, const uint64_t * pPresentIds_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, swapchainCount{ swapchainCount_ }, pPresentIds{ pPresentIds_ }
{}
VULKAN_HPP_CONSTEXPR PresentId2KHR( PresentId2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PresentId2KHR( VkPresentId2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PresentId2KHR( *reinterpret_cast<PresentId2KHR const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PresentId2KHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & presentIds_, const void * pNext_ = nullptr )
: pNext( pNext_ ), swapchainCount( static_cast<uint32_t>( presentIds_.size() ) ), pPresentIds( presentIds_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
PresentId2KHR & operator=( PresentId2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PresentId2KHR & operator=( VkPresentId2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentId2KHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PresentId2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PresentId2KHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT
{
swapchainCount = swapchainCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PresentId2KHR & setPPresentIds( const uint64_t * pPresentIds_ ) VULKAN_HPP_NOEXCEPT
{
pPresentIds = pPresentIds_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PresentId2KHR & setPresentIds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & presentIds_ ) VULKAN_HPP_NOEXCEPT
{
swapchainCount = static_cast<uint32_t>( presentIds_.size() );
pPresentIds = presentIds_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPresentId2KHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPresentId2KHR*>( this );
}
operator VkPresentId2KHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPresentId2KHR*>( this );
}
operator VkPresentId2KHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPresentId2KHR*>( this );
}
operator VkPresentId2KHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPresentId2KHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const uint64_t * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, swapchainCount, pPresentIds );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PresentId2KHR const & ) const = default;
#else
bool operator==( PresentId2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( swapchainCount == rhs.swapchainCount )
&& ( pPresentIds == rhs.pPresentIds );
#endif
}
bool operator!=( PresentId2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentId2KHR;
const void * pNext = {};
uint32_t swapchainCount = {};
const uint64_t * pPresentIds = {};
};
template <>
struct CppType<StructureType, StructureType::ePresentId2KHR>
{
using Type = PresentId2KHR;
};
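  // Usage sketch (illustrative): PresentId2KHR mirrors PresentIdKHR but is
  // assumed to belong to VK_KHR_present_id2; one id is supplied per swapchain
  // being presented. `vk` is the default VULKAN_HPP_NAMESPACE, `frameNumber`
  // is an app-defined counter, and presentInfo is a vk::PresentInfoKHR built
  // elsewhere.
  //
  //   uint64_t id = frameNumber;            // app-defined, monotonically increasing per swapchain
  //   vk::PresentId2KHR presentId2{};
  //   presentId2.setPresentIds( id );       // single-element proxy; also sets swapchainCount = 1
  //   presentInfo.setPNext( &presentId2 );  // swapchainCount must match PresentInfoKHR::swapchainCount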
// wrapper struct for struct VkPresentIdKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPresentIdKHR.html
struct PresentIdKHR
{
using NativeType = VkPresentIdKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentIdKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PresentIdKHR(uint32_t swapchainCount_ = {}, const uint64_t * pPresentIds_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, swapchainCount{ swapchainCount_ }, pPresentIds{ pPresentIds_ }
{}
VULKAN_HPP_CONSTEXPR PresentIdKHR( PresentIdKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PresentIdKHR( VkPresentIdKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PresentIdKHR( *reinterpret_cast<PresentIdKHR const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PresentIdKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & presentIds_, const void * pNext_ = nullptr )
: pNext( pNext_ ), swapchainCount( static_cast<uint32_t>( presentIds_.size() ) ), pPresentIds( presentIds_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
PresentIdKHR & operator=( PresentIdKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PresentIdKHR & operator=( VkPresentIdKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentIdKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PresentIdKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PresentIdKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT
{
swapchainCount = swapchainCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PresentIdKHR & setPPresentIds( const uint64_t * pPresentIds_ ) VULKAN_HPP_NOEXCEPT
{
pPresentIds = pPresentIds_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PresentIdKHR & setPresentIds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & presentIds_ ) VULKAN_HPP_NOEXCEPT
{
swapchainCount = static_cast<uint32_t>( presentIds_.size() );
pPresentIds = presentIds_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPresentIdKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPresentIdKHR*>( this );
}
operator VkPresentIdKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPresentIdKHR*>( this );
}
operator VkPresentIdKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPresentIdKHR*>( this );
}
operator VkPresentIdKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPresentIdKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const uint64_t * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, swapchainCount, pPresentIds );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PresentIdKHR const & ) const = default;
#else
bool operator==( PresentIdKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( swapchainCount == rhs.swapchainCount )
&& ( pPresentIds == rhs.pPresentIds );
#endif
}
bool operator!=( PresentIdKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentIdKHR;
const void * pNext = {};
uint32_t swapchainCount = {};
const uint64_t * pPresentIds = {};
};
template <>
struct CppType<StructureType, StructureType::ePresentIdKHR>
{
using Type = PresentIdKHR;
};
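  // Usage sketch (illustrative): tagging a present with an id so it can later
  // be waited on; assumes VK_KHR_present_id (and typically VK_KHR_present_wait)
  // is enabled. `vk` is the default VULKAN_HPP_NAMESPACE and presentInfo is a
  // vk::PresentInfoKHR built elsewhere.
  //
  //   uint64_t id = frameNumber;          // app-defined, must increase for each present
  //   vk::PresentIdKHR presentId{};
  //   presentId.setPresentIds( id );      // single-element proxy; sets swapchainCount = 1 as well
  //   presentInfo.setPNext( &presentId );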
// wrapper struct for struct VkPresentInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPresentInfoKHR.html
struct PresentInfoKHR
{
using NativeType = VkPresentInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PresentInfoKHR(uint32_t waitSemaphoreCount_ = {}, const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ = {}, uint32_t swapchainCount_ = {}, const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains_ = {}, const uint32_t * pImageIndices_ = {}, VULKAN_HPP_NAMESPACE::Result * pResults_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, waitSemaphoreCount{ waitSemaphoreCount_ }, pWaitSemaphores{ pWaitSemaphores_ }, swapchainCount{ swapchainCount_ }, pSwapchains{ pSwapchains_ }, pImageIndices{ pImageIndices_ }, pResults{ pResults_ }
{}
VULKAN_HPP_CONSTEXPR PresentInfoKHR( PresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PresentInfoKHR( VkPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PresentInfoKHR( *reinterpret_cast<PresentInfoKHR const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PresentInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & waitSemaphores_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const & swapchains_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & imageIndices_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::Result> const & results_ = {}, const void * pNext_ = nullptr )
: pNext( pNext_ ), waitSemaphoreCount( static_cast<uint32_t>( waitSemaphores_.size() ) ), pWaitSemaphores( waitSemaphores_.data() ), swapchainCount( static_cast<uint32_t>( swapchains_.size() ) ), pSwapchains( swapchains_.data() ), pImageIndices( imageIndices_.data() ), pResults( results_.data() )
{
#ifdef VULKAN_HPP_NO_EXCEPTIONS
VULKAN_HPP_ASSERT( swapchains_.size() == imageIndices_.size() );
VULKAN_HPP_ASSERT( results_.empty() || ( swapchains_.size() == results_.size() ) );
VULKAN_HPP_ASSERT( results_.empty() || ( imageIndices_.size() == results_.size() ) );
#else
if ( swapchains_.size() != imageIndices_.size() )
{
        throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::PresentInfoKHR::PresentInfoKHR: swapchains_.size() != imageIndices_.size()" );
}
if ( !results_.empty() && ( swapchains_.size() != results_.size() ) )
{
        throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::PresentInfoKHR::PresentInfoKHR: !results_.empty() && ( swapchains_.size() != results_.size() )" );
}
if ( !results_.empty() && ( imageIndices_.size() != results_.size() ) )
{
        throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::PresentInfoKHR::PresentInfoKHR: !results_.empty() && ( imageIndices_.size() != results_.size() )" );
}
#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
PresentInfoKHR & operator=( PresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PresentInfoKHR & operator=( VkPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
{
waitSemaphoreCount = waitSemaphoreCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT
{
pWaitSemaphores = pWaitSemaphores_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PresentInfoKHR & setWaitSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & waitSemaphores_ ) VULKAN_HPP_NOEXCEPT
{
waitSemaphoreCount = static_cast<uint32_t>( waitSemaphores_.size() );
pWaitSemaphores = waitSemaphores_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT
{
swapchainCount = swapchainCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setPSwapchains( const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains_ ) VULKAN_HPP_NOEXCEPT
{
pSwapchains = pSwapchains_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PresentInfoKHR & setSwapchains( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const & swapchains_ ) VULKAN_HPP_NOEXCEPT
{
swapchainCount = static_cast<uint32_t>( swapchains_.size() );
pSwapchains = swapchains_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setPImageIndices( const uint32_t * pImageIndices_ ) VULKAN_HPP_NOEXCEPT
{
pImageIndices = pImageIndices_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PresentInfoKHR & setImageIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & imageIndices_ ) VULKAN_HPP_NOEXCEPT
{
swapchainCount = static_cast<uint32_t>( imageIndices_.size() );
pImageIndices = imageIndices_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setPResults( VULKAN_HPP_NAMESPACE::Result * pResults_ ) VULKAN_HPP_NOEXCEPT
{
pResults = pResults_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PresentInfoKHR & setResults( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::Result> const & results_ ) VULKAN_HPP_NOEXCEPT
{
swapchainCount = static_cast<uint32_t>( results_.size() );
pResults = results_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPresentInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPresentInfoKHR*>( this );
}
operator VkPresentInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPresentInfoKHR*>( this );
}
operator VkPresentInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPresentInfoKHR*>( this );
}
operator VkPresentInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPresentInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Semaphore * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::SwapchainKHR * const &, const uint32_t * const &, VULKAN_HPP_NAMESPACE::Result * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, waitSemaphoreCount, pWaitSemaphores, swapchainCount, pSwapchains, pImageIndices, pResults );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PresentInfoKHR const & ) const = default;
#else
bool operator==( PresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( waitSemaphoreCount == rhs.waitSemaphoreCount )
&& ( pWaitSemaphores == rhs.pWaitSemaphores )
&& ( swapchainCount == rhs.swapchainCount )
&& ( pSwapchains == rhs.pSwapchains )
&& ( pImageIndices == rhs.pImageIndices )
&& ( pResults == rhs.pResults );
#endif
}
bool operator!=( PresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentInfoKHR;
const void * pNext = {};
uint32_t waitSemaphoreCount = {};
const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores = {};
uint32_t swapchainCount = {};
const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains = {};
const uint32_t * pImageIndices = {};
VULKAN_HPP_NAMESPACE::Result * pResults = {};
};
template <>
struct CppType<StructureType, StructureType::ePresentInfoKHR>
{
using Type = PresentInfoKHR;
};
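  // Usage sketch (illustrative): the enhanced-mode constructor pairs the
  // semaphore, swapchain and image-index proxies and verifies that the
  // swapchain and image-index counts match (throwing LogicError on mismatch
  // unless VULKAN_HPP_NO_EXCEPTIONS is defined). `vk` assumes the default
  // VULKAN_HPP_NAMESPACE; the handles are placeholders obtained elsewhere.
  //
  //   vk::Semaphore renderFinished = /* signaled by the last submit */;
  //   vk::SwapchainKHR swapchain   = /* created earlier */;
  //   uint32_t imageIndex          = /* from acquireNextImageKHR */;
  //   vk::PresentInfoKHR presentInfo( renderFinished, swapchain, imageIndex );
  //   queue.presentKHR( presentInfo );  // queue: a vk::Queue with present support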
// wrapper struct for struct VkRectLayerKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRectLayerKHR.html
struct RectLayerKHR
{
using NativeType = VkRectLayerKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR RectLayerKHR(VULKAN_HPP_NAMESPACE::Offset2D offset_ = {}, VULKAN_HPP_NAMESPACE::Extent2D extent_ = {}, uint32_t layer_ = {}) VULKAN_HPP_NOEXCEPT
: offset{ offset_ }, extent{ extent_ }, layer{ layer_ }
{}
VULKAN_HPP_CONSTEXPR RectLayerKHR( RectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
RectLayerKHR( VkRectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: RectLayerKHR( *reinterpret_cast<RectLayerKHR const *>( &rhs ) )
{}
explicit RectLayerKHR( Rect2D const & rect2D, uint32_t layer_ = {} )
: offset( rect2D.offset )
, extent( rect2D.extent )
, layer( layer_ )
{}
RectLayerKHR & operator=( RectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
RectLayerKHR & operator=( VkRectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RectLayerKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 RectLayerKHR & setOffset( VULKAN_HPP_NAMESPACE::Offset2D const & offset_ ) VULKAN_HPP_NOEXCEPT
{
offset = offset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RectLayerKHR & setExtent( VULKAN_HPP_NAMESPACE::Extent2D const & extent_ ) VULKAN_HPP_NOEXCEPT
{
extent = extent_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RectLayerKHR & setLayer( uint32_t layer_ ) VULKAN_HPP_NOEXCEPT
{
layer = layer_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkRectLayerKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRectLayerKHR*>( this );
}
operator VkRectLayerKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkRectLayerKHR*>( this );
}
operator VkRectLayerKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkRectLayerKHR*>( this );
}
operator VkRectLayerKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkRectLayerKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::Offset2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( offset, extent, layer );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( RectLayerKHR const & ) const = default;
#else
bool operator==( RectLayerKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( offset == rhs.offset )
&& ( extent == rhs.extent )
&& ( layer == rhs.layer );
#endif
}
bool operator!=( RectLayerKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::Offset2D offset = {};
VULKAN_HPP_NAMESPACE::Extent2D extent = {};
uint32_t layer = {};
};
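  // Usage sketch (illustrative): besides memberwise construction, RectLayerKHR
  // offers an explicit conversion from Rect2D that copies offset and extent
  // and takes the image layer separately. `vk` is the default
  // VULKAN_HPP_NAMESPACE; the rectangle values are placeholders.
  //
  //   vk::Rect2D damaged{ vk::Offset2D{ 0, 0 }, vk::Extent2D{ 64, 64 } };
  //   vk::RectLayerKHR dirtyRect( damaged, 0 );  // layer 0 of the swapchain image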
// wrapper struct for struct VkPresentRegionKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPresentRegionKHR.html
struct PresentRegionKHR
{
using NativeType = VkPresentRegionKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PresentRegionKHR(uint32_t rectangleCount_ = {}, const VULKAN_HPP_NAMESPACE::RectLayerKHR * pRectangles_ = {}) VULKAN_HPP_NOEXCEPT
: rectangleCount{ rectangleCount_ }, pRectangles{ pRectangles_ }
{}
VULKAN_HPP_CONSTEXPR PresentRegionKHR( PresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PresentRegionKHR( VkPresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PresentRegionKHR( *reinterpret_cast<PresentRegionKHR const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PresentRegionKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RectLayerKHR> const & rectangles_ )
: rectangleCount( static_cast<uint32_t>( rectangles_.size() ) ), pRectangles( rectangles_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
PresentRegionKHR & operator=( PresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PresentRegionKHR & operator=( VkPresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentRegionKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PresentRegionKHR & setRectangleCount( uint32_t rectangleCount_ ) VULKAN_HPP_NOEXCEPT
{
rectangleCount = rectangleCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PresentRegionKHR & setPRectangles( const VULKAN_HPP_NAMESPACE::RectLayerKHR * pRectangles_ ) VULKAN_HPP_NOEXCEPT
{
pRectangles = pRectangles_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PresentRegionKHR & setRectangles( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RectLayerKHR> const & rectangles_ ) VULKAN_HPP_NOEXCEPT
{
rectangleCount = static_cast<uint32_t>( rectangles_.size() );
pRectangles = rectangles_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPresentRegionKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPresentRegionKHR*>( this );
}
operator VkPresentRegionKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPresentRegionKHR*>( this );
}
operator VkPresentRegionKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPresentRegionKHR*>( this );
}
operator VkPresentRegionKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPresentRegionKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<uint32_t const &, const VULKAN_HPP_NAMESPACE::RectLayerKHR * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( rectangleCount, pRectangles );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PresentRegionKHR const & ) const = default;
#else
bool operator==( PresentRegionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( rectangleCount == rhs.rectangleCount )
&& ( pRectangles == rhs.pRectangles );
#endif
}
bool operator!=( PresentRegionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
uint32_t rectangleCount = {};
const VULKAN_HPP_NAMESPACE::RectLayerKHR * pRectangles = {};
};
// wrapper struct for struct VkPresentRegionsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPresentRegionsKHR.html
struct PresentRegionsKHR
{
using NativeType = VkPresentRegionsKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentRegionsKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PresentRegionsKHR(uint32_t swapchainCount_ = {}, const VULKAN_HPP_NAMESPACE::PresentRegionKHR * pRegions_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, swapchainCount{ swapchainCount_ }, pRegions{ pRegions_ }
{}
VULKAN_HPP_CONSTEXPR PresentRegionsKHR( PresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PresentRegionsKHR( VkPresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PresentRegionsKHR( *reinterpret_cast<PresentRegionsKHR const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PresentRegionsKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PresentRegionKHR> const & regions_, const void * pNext_ = nullptr )
: pNext( pNext_ ), swapchainCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
PresentRegionsKHR & operator=( PresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PresentRegionsKHR & operator=( VkPresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentRegionsKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PresentRegionsKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PresentRegionsKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT
{
swapchainCount = swapchainCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PresentRegionsKHR & setPRegions( const VULKAN_HPP_NAMESPACE::PresentRegionKHR * pRegions_ ) VULKAN_HPP_NOEXCEPT
{
pRegions = pRegions_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PresentRegionsKHR & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PresentRegionKHR> const & regions_ ) VULKAN_HPP_NOEXCEPT
{
swapchainCount = static_cast<uint32_t>( regions_.size() );
pRegions = regions_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPresentRegionsKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPresentRegionsKHR*>( this );
}
operator VkPresentRegionsKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPresentRegionsKHR*>( this );
}
operator VkPresentRegionsKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPresentRegionsKHR*>( this );
}
operator VkPresentRegionsKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPresentRegionsKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::PresentRegionKHR * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, swapchainCount, pRegions );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PresentRegionsKHR const & ) const = default;
#else
bool operator==( PresentRegionsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( swapchainCount == rhs.swapchainCount )
&& ( pRegions == rhs.pRegions );
#endif
}
bool operator!=( PresentRegionsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentRegionsKHR;
const void * pNext = {};
uint32_t swapchainCount = {};
const VULKAN_HPP_NAMESPACE::PresentRegionKHR * pRegions = {};
};
template <>
struct CppType<StructureType, StructureType::ePresentRegionsKHR>
{
using Type = PresentRegionsKHR;
};
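  // Usage sketch (illustrative): with VK_KHR_incremental_present (assumed
  // enabled), a present can report which rectangles actually changed. One
  // PresentRegionKHR is supplied per swapchain and the whole bundle is chained
  // into PresentInfoKHR. `vk` is the default VULKAN_HPP_NAMESPACE; dirtyRect
  // is a vk::RectLayerKHR and presentInfo a vk::PresentInfoKHR from elsewhere.
  //
  //   vk::PresentRegionKHR region( dirtyRect );  // rectangleCount = 1 via the single-element proxy
  //   vk::PresentRegionsKHR regions( region );   // swapchainCount must match PresentInfoKHR::swapchainCount
  //   presentInfo.setPNext( &regions );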
// wrapper struct for struct VkPresentTimeGOOGLE, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPresentTimeGOOGLE.html
struct PresentTimeGOOGLE
{
using NativeType = VkPresentTimeGOOGLE;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PresentTimeGOOGLE(uint32_t presentID_ = {}, uint64_t desiredPresentTime_ = {}) VULKAN_HPP_NOEXCEPT
: presentID{ presentID_ }, desiredPresentTime{ desiredPresentTime_ }
{}
VULKAN_HPP_CONSTEXPR PresentTimeGOOGLE( PresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PresentTimeGOOGLE( VkPresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
: PresentTimeGOOGLE( *reinterpret_cast<PresentTimeGOOGLE const *>( &rhs ) )
{}
PresentTimeGOOGLE & operator=( PresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PresentTimeGOOGLE & operator=( VkPresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PresentTimeGOOGLE & setPresentID( uint32_t presentID_ ) VULKAN_HPP_NOEXCEPT
{
presentID = presentID_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PresentTimeGOOGLE & setDesiredPresentTime( uint64_t desiredPresentTime_ ) VULKAN_HPP_NOEXCEPT
{
desiredPresentTime = desiredPresentTime_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPresentTimeGOOGLE const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPresentTimeGOOGLE*>( this );
}
operator VkPresentTimeGOOGLE &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPresentTimeGOOGLE*>( this );
}
operator VkPresentTimeGOOGLE const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPresentTimeGOOGLE*>( this );
}
operator VkPresentTimeGOOGLE *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPresentTimeGOOGLE*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<uint32_t const &, uint64_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( presentID, desiredPresentTime );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PresentTimeGOOGLE const & ) const = default;
#else
bool operator==( PresentTimeGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( presentID == rhs.presentID )
&& ( desiredPresentTime == rhs.desiredPresentTime );
#endif
}
bool operator!=( PresentTimeGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
uint32_t presentID = {};
uint64_t desiredPresentTime = {};
};
// wrapper struct for struct VkPresentTimesInfoGOOGLE, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPresentTimesInfoGOOGLE.html
struct PresentTimesInfoGOOGLE
{
using NativeType = VkPresentTimesInfoGOOGLE;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentTimesInfoGOOGLE;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PresentTimesInfoGOOGLE(uint32_t swapchainCount_ = {}, const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE * pTimes_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, swapchainCount{ swapchainCount_ }, pTimes{ pTimes_ }
{}
VULKAN_HPP_CONSTEXPR PresentTimesInfoGOOGLE( PresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PresentTimesInfoGOOGLE( VkPresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
: PresentTimesInfoGOOGLE( *reinterpret_cast<PresentTimesInfoGOOGLE const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PresentTimesInfoGOOGLE( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE> const & times_, const void * pNext_ = nullptr )
: pNext( pNext_ ), swapchainCount( static_cast<uint32_t>( times_.size() ) ), pTimes( times_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
PresentTimesInfoGOOGLE & operator=( PresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PresentTimesInfoGOOGLE & operator=( VkPresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentTimesInfoGOOGLE const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PresentTimesInfoGOOGLE & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PresentTimesInfoGOOGLE & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT
{
swapchainCount = swapchainCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PresentTimesInfoGOOGLE & setPTimes( const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE * pTimes_ ) VULKAN_HPP_NOEXCEPT
{
pTimes = pTimes_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PresentTimesInfoGOOGLE & setTimes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE> const & times_ ) VULKAN_HPP_NOEXCEPT
{
swapchainCount = static_cast<uint32_t>( times_.size() );
pTimes = times_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPresentTimesInfoGOOGLE const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPresentTimesInfoGOOGLE*>( this );
}
operator VkPresentTimesInfoGOOGLE &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPresentTimesInfoGOOGLE*>( this );
}
operator VkPresentTimesInfoGOOGLE const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPresentTimesInfoGOOGLE*>( this );
}
operator VkPresentTimesInfoGOOGLE *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPresentTimesInfoGOOGLE*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, swapchainCount, pTimes );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PresentTimesInfoGOOGLE const & ) const = default;
#else
bool operator==( PresentTimesInfoGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( swapchainCount == rhs.swapchainCount )
&& ( pTimes == rhs.pTimes );
#endif
}
bool operator!=( PresentTimesInfoGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentTimesInfoGOOGLE;
const void * pNext = {};
uint32_t swapchainCount = {};
const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE * pTimes = {};
};
template <>
struct CppType<StructureType, StructureType::ePresentTimesInfoGOOGLE>
{
using Type = PresentTimesInfoGOOGLE;
};
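  // Usage sketch (illustrative): VK_GOOGLE_display_timing (assumed enabled)
  // passes a desired earliest present time, in nanoseconds, for each
  // swapchain. `vk` is the default VULKAN_HPP_NAMESPACE; presentID and
  // desiredPresentTimeNs are app-supplied placeholders, and presentInfo is a
  // vk::PresentInfoKHR built elsewhere.
  //
  //   vk::PresentTimeGOOGLE time( presentID, desiredPresentTimeNs );
  //   vk::PresentTimesInfoGOOGLE timesInfo( time );  // swapchainCount must match PresentInfoKHR::swapchainCount
  //   presentInfo.setPNext( &timesInfo );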
// wrapper struct for struct VkPresentWait2InfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPresentWait2InfoKHR.html
struct PresentWait2InfoKHR
{
using NativeType = VkPresentWait2InfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentWait2InfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PresentWait2InfoKHR(uint64_t presentId_ = {}, uint64_t timeout_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, presentId{ presentId_ }, timeout{ timeout_ }
{}
VULKAN_HPP_CONSTEXPR PresentWait2InfoKHR( PresentWait2InfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PresentWait2InfoKHR( VkPresentWait2InfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PresentWait2InfoKHR( *reinterpret_cast<PresentWait2InfoKHR const *>( &rhs ) )
{}
PresentWait2InfoKHR & operator=( PresentWait2InfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PresentWait2InfoKHR & operator=( VkPresentWait2InfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentWait2InfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PresentWait2InfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PresentWait2InfoKHR & setPresentId( uint64_t presentId_ ) VULKAN_HPP_NOEXCEPT
{
presentId = presentId_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PresentWait2InfoKHR & setTimeout( uint64_t timeout_ ) VULKAN_HPP_NOEXCEPT
{
timeout = timeout_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPresentWait2InfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPresentWait2InfoKHR*>( this );
}
operator VkPresentWait2InfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPresentWait2InfoKHR*>( this );
}
operator VkPresentWait2InfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPresentWait2InfoKHR*>( this );
}
operator VkPresentWait2InfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPresentWait2InfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint64_t const &, uint64_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, presentId, timeout );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PresentWait2InfoKHR const & ) const = default;
#else
bool operator==( PresentWait2InfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( presentId == rhs.presentId )
&& ( timeout == rhs.timeout );
#endif
}
bool operator!=( PresentWait2InfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentWait2InfoKHR;
const void * pNext = {};
uint64_t presentId = {};
uint64_t timeout = {};
};
template <>
struct CppType<StructureType, StructureType::ePresentWait2InfoKHR>
{
using Type = PresentWait2InfoKHR;
};
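  // Usage sketch (illustrative): waiting on a previously tagged present via
  // the assumed VK_KHR_present_wait2 entry point. The wrapper name below
  // follows the usual vulkan.hpp conventions but is an assumption, as are the
  // id (from a PresentId2KHR chained at present time) and the one-second
  // timeout. `vk` is the default VULKAN_HPP_NAMESPACE.
  //
  //   vk::PresentWait2InfoKHR waitInfo( id, 1'000'000'000 );  // timeout in nanoseconds
  //   device.waitForPresent2KHR( swapchain, waitInfo );       // blocks until the present completes or times out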
// wrapper struct for struct VkPrivateDataSlotCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPrivateDataSlotCreateInfo.html
struct PrivateDataSlotCreateInfo
{
using NativeType = VkPrivateDataSlotCreateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePrivateDataSlotCreateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PrivateDataSlotCreateInfo(VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlags flags_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }
{}
VULKAN_HPP_CONSTEXPR PrivateDataSlotCreateInfo( PrivateDataSlotCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PrivateDataSlotCreateInfo( VkPrivateDataSlotCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: PrivateDataSlotCreateInfo( *reinterpret_cast<PrivateDataSlotCreateInfo const *>( &rhs ) )
{}
PrivateDataSlotCreateInfo & operator=( PrivateDataSlotCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PrivateDataSlotCreateInfo & operator=( VkPrivateDataSlotCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PrivateDataSlotCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PrivateDataSlotCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPrivateDataSlotCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPrivateDataSlotCreateInfo*>( this );
}
operator VkPrivateDataSlotCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPrivateDataSlotCreateInfo*>( this );
}
operator VkPrivateDataSlotCreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPrivateDataSlotCreateInfo*>( this );
}
operator VkPrivateDataSlotCreateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPrivateDataSlotCreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlags const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PrivateDataSlotCreateInfo const & ) const = default;
#else
bool operator==( PrivateDataSlotCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags );
#endif
}
bool operator!=( PrivateDataSlotCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePrivateDataSlotCreateInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlags flags = {};
};
template <>
struct CppType<StructureType, StructureType::ePrivateDataSlotCreateInfo>
{
using Type = PrivateDataSlotCreateInfo;
};
using PrivateDataSlotCreateInfoEXT = PrivateDataSlotCreateInfo;
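  // Usage sketch (illustrative): private data slots carry no flags today, so a
  // default-constructed create info is typical. `vk` is the default
  // VULKAN_HPP_NAMESPACE, the exception-based overload is assumed (i.e.
  // VULKAN_HPP_NO_EXCEPTIONS is not defined), and the templated setPrivateData
  // convenience is assumed to be available for the handle type used.
  //
  //   vk::PrivateDataSlotCreateInfo createInfo{};
  //   vk::PrivateDataSlot slot = device.createPrivateDataSlot( createInfo );
  //   device.setPrivateData( buffer, slot, 42 );  // attach a 64-bit value to a handle, e.g. a vk::Buffer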
// wrapper struct for struct VkProtectedSubmitInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkProtectedSubmitInfo.html
struct ProtectedSubmitInfo
{
using NativeType = VkProtectedSubmitInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eProtectedSubmitInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ProtectedSubmitInfo(VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, protectedSubmit{ protectedSubmit_ }
{}
VULKAN_HPP_CONSTEXPR ProtectedSubmitInfo( ProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ProtectedSubmitInfo( VkProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: ProtectedSubmitInfo( *reinterpret_cast<ProtectedSubmitInfo const *>( &rhs ) )
{}
ProtectedSubmitInfo & operator=( ProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ProtectedSubmitInfo & operator=( VkProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ProtectedSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ProtectedSubmitInfo & setProtectedSubmit( VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit_ ) VULKAN_HPP_NOEXCEPT
{
protectedSubmit = protectedSubmit_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkProtectedSubmitInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkProtectedSubmitInfo*>( this );
}
operator VkProtectedSubmitInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkProtectedSubmitInfo*>( this );
}
operator VkProtectedSubmitInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkProtectedSubmitInfo*>( this );
}
operator VkProtectedSubmitInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkProtectedSubmitInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, protectedSubmit );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ProtectedSubmitInfo const & ) const = default;
#else
bool operator==( ProtectedSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( protectedSubmit == rhs.protectedSubmit );
#endif
}
bool operator!=( ProtectedSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eProtectedSubmitInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit = {};
};
template <>
struct CppType<StructureType, StructureType::eProtectedSubmitInfo>
{
using Type = ProtectedSubmitInfo;
};
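// Usage sketch (editor's illustrative comment, not generated code; assumes a
// protected-capable vk::Queue `queue`, a vk::CommandBuffer `cb` allocated from a
// protected command pool, and a vk::Fence `fence`; all names are hypothetical).
// The structure is chained into a SubmitInfo to mark the batch as protected:
//   vk::ProtectedSubmitInfo protectedInfo{ VK_TRUE };
//   vk::SubmitInfo submitInfo{};
//   submitInfo.setCommandBuffers( cb ).setPNext( &protectedInfo );
//   queue.submit( submitInfo, fence );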
// wrapper struct for struct VkPushConstantsInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPushConstantsInfo.html
struct PushConstantsInfo
{
using NativeType = VkPushConstantsInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePushConstantsInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PushConstantsInfo(VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, uint32_t offset_ = {}, uint32_t size_ = {}, const void * pValues_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, layout{ layout_ }, stageFlags{ stageFlags_ }, offset{ offset_ }, size{ size_ }, pValues{ pValues_ }
{}
VULKAN_HPP_CONSTEXPR PushConstantsInfo( PushConstantsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PushConstantsInfo( VkPushConstantsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: PushConstantsInfo( *reinterpret_cast<PushConstantsInfo const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
template <typename T>
PushConstantsInfo( VULKAN_HPP_NAMESPACE::PipelineLayout layout_, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_, uint32_t offset_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & values_, const void * pNext_ = nullptr )
: pNext( pNext_ ), layout( layout_ ), stageFlags( stageFlags_ ), offset( offset_ ), size( static_cast<uint32_t>( values_.size() * sizeof(T) ) ), pValues( values_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
PushConstantsInfo & operator=( PushConstantsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PushConstantsInfo & operator=( VkPushConstantsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PushConstantsInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PushConstantsInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PushConstantsInfo & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
{
layout = layout_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PushConstantsInfo & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT
{
stageFlags = stageFlags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PushConstantsInfo & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT
{
offset = offset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PushConstantsInfo & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT
{
size = size_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PushConstantsInfo & setPValues( const void * pValues_ ) VULKAN_HPP_NOEXCEPT
{
pValues = pValues_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
template <typename T>
PushConstantsInfo & setValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & values_ ) VULKAN_HPP_NOEXCEPT
{
size = static_cast<uint32_t>( values_.size() * sizeof(T) );
pValues = values_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPushConstantsInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPushConstantsInfo*>( this );
}
operator VkPushConstantsInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPushConstantsInfo*>( this );
}
operator VkPushConstantsInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPushConstantsInfo*>( this );
}
operator VkPushConstantsInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPushConstantsInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineLayout const &, VULKAN_HPP_NAMESPACE::ShaderStageFlags const &, uint32_t const &, uint32_t const &, const void * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, layout, stageFlags, offset, size, pValues );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PushConstantsInfo const & ) const = default;
#else
bool operator==( PushConstantsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( layout == rhs.layout )
&& ( stageFlags == rhs.stageFlags )
&& ( offset == rhs.offset )
&& ( size == rhs.size )
&& ( pValues == rhs.pValues );
#endif
}
bool operator!=( PushConstantsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePushConstantsInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::PipelineLayout layout = {};
VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {};
uint32_t offset = {};
uint32_t size = {};
const void * pValues = {};
};
template <>
struct CppType<StructureType, StructureType::ePushConstantsInfo>
{
using Type = PushConstantsInfo;
};
using PushConstantsInfoKHR = PushConstantsInfo;
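// Usage sketch (editor's illustrative comment, not generated code; assumes
// Vulkan 1.4 or VK_KHR_maintenance6, a recording vk::CommandBuffer `cb`, and a
// vk::PipelineLayout `layout` with a matching push-constant range; names are
// hypothetical). Unlike vkCmdPushConstants, the layout travels in the struct:
//   float color[4] = { 1.0f, 0.0f, 0.0f, 1.0f };
//   vk::PushConstantsInfo info( layout, vk::ShaderStageFlagBits::eFragment, /*offset*/ 0, sizeof( color ), color );
//   cb.pushConstants2( info );
// In enhanced mode, the templated constructor or setValues() derives size from a
// typed range instead of taking an explicit byte count.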
// wrapper struct for struct VkWriteDescriptorSet, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkWriteDescriptorSet.html
struct WriteDescriptorSet
{
using NativeType = VkWriteDescriptorSet;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSet;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR WriteDescriptorSet(VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ = {}, uint32_t dstBinding_ = {}, uint32_t dstArrayElement_ = {}, uint32_t descriptorCount_ = {}, VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pImageInfo_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo * pBufferInfo_ = {}, const VULKAN_HPP_NAMESPACE::BufferView * pTexelBufferView_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, dstSet{ dstSet_ }, dstBinding{ dstBinding_ }, dstArrayElement{ dstArrayElement_ }, descriptorCount{ descriptorCount_ }, descriptorType{ descriptorType_ }, pImageInfo{ pImageInfo_ }, pBufferInfo{ pBufferInfo_ }, pTexelBufferView{ pTexelBufferView_ }
{}
VULKAN_HPP_CONSTEXPR WriteDescriptorSet( WriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default;
WriteDescriptorSet( VkWriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
: WriteDescriptorSet( *reinterpret_cast<WriteDescriptorSet const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
WriteDescriptorSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_, uint32_t dstBinding_, uint32_t dstArrayElement_, VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorImageInfo> const & imageInfo_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo> const & bufferInfo_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferView> const & texelBufferView_ = {}, const void * pNext_ = nullptr )
: pNext( pNext_ ), dstSet( dstSet_ ), dstBinding( dstBinding_ ), dstArrayElement( dstArrayElement_ ), descriptorCount( static_cast<uint32_t>( !imageInfo_.empty() ? imageInfo_.size() : !bufferInfo_.empty() ? bufferInfo_.size() : texelBufferView_.size() ) ), descriptorType( descriptorType_ ), pImageInfo( imageInfo_.data() ), pBufferInfo( bufferInfo_.data() ), pTexelBufferView( texelBufferView_.data() )
{
#ifdef VULKAN_HPP_NO_EXCEPTIONS
VULKAN_HPP_ASSERT( ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() ) <= 1 );
#else
if ( 1 < ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() ) )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::WriteDescriptorSet::WriteDescriptorSet: 1 < ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() )" );
}
#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
WriteDescriptorSet & operator=( WriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
WriteDescriptorSet & operator=( VkWriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WriteDescriptorSet const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setDstSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ ) VULKAN_HPP_NOEXCEPT
{
dstSet = dstSet_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT
{
dstBinding = dstBinding_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT
{
dstArrayElement = dstArrayElement_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
{
descriptorCount = descriptorCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
{
descriptorType = descriptorType_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setPImageInfo( const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pImageInfo_ ) VULKAN_HPP_NOEXCEPT
{
pImageInfo = pImageInfo_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
WriteDescriptorSet & setImageInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorImageInfo> const & imageInfo_ ) VULKAN_HPP_NOEXCEPT
{
descriptorCount = static_cast<uint32_t>( imageInfo_.size() );
pImageInfo = imageInfo_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setPBufferInfo( const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo * pBufferInfo_ ) VULKAN_HPP_NOEXCEPT
{
pBufferInfo = pBufferInfo_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
WriteDescriptorSet & setBufferInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo> const & bufferInfo_ ) VULKAN_HPP_NOEXCEPT
{
descriptorCount = static_cast<uint32_t>( bufferInfo_.size() );
pBufferInfo = bufferInfo_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setPTexelBufferView( const VULKAN_HPP_NAMESPACE::BufferView * pTexelBufferView_ ) VULKAN_HPP_NOEXCEPT
{
pTexelBufferView = pTexelBufferView_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
WriteDescriptorSet & setTexelBufferView( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferView> const & texelBufferView_ ) VULKAN_HPP_NOEXCEPT
{
descriptorCount = static_cast<uint32_t>( texelBufferView_.size() );
pTexelBufferView = texelBufferView_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkWriteDescriptorSet const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkWriteDescriptorSet*>( this );
}
operator VkWriteDescriptorSet &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkWriteDescriptorSet*>( this );
}
operator VkWriteDescriptorSet const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkWriteDescriptorSet*>( this );
}
operator VkWriteDescriptorSet *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkWriteDescriptorSet*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DescriptorSet const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DescriptorType const &, const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * const &, const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo * const &, const VULKAN_HPP_NAMESPACE::BufferView * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, dstSet, dstBinding, dstArrayElement, descriptorCount, descriptorType, pImageInfo, pBufferInfo, pTexelBufferView );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( WriteDescriptorSet const & ) const = default;
#else
bool operator==( WriteDescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( dstSet == rhs.dstSet )
&& ( dstBinding == rhs.dstBinding )
&& ( dstArrayElement == rhs.dstArrayElement )
&& ( descriptorCount == rhs.descriptorCount )
&& ( descriptorType == rhs.descriptorType )
&& ( pImageInfo == rhs.pImageInfo )
&& ( pBufferInfo == rhs.pBufferInfo )
&& ( pTexelBufferView == rhs.pTexelBufferView );
#endif
}
bool operator!=( WriteDescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSet;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DescriptorSet dstSet = {};
uint32_t dstBinding = {};
uint32_t dstArrayElement = {};
uint32_t descriptorCount = {};
VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pImageInfo = {};
const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo * pBufferInfo = {};
const VULKAN_HPP_NAMESPACE::BufferView * pTexelBufferView = {};
};
template <>
struct CppType<StructureType, StructureType::eWriteDescriptorSet>
{
using Type = WriteDescriptorSet;
};
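// Usage sketch (editor's illustrative comment, not generated code; assumes a
// valid vk::Device `device`, a vk::DescriptorSet `set`, and a filled
// vk::DescriptorBufferInfo `bufferInfo`; names are hypothetical). The
// enhanced-mode constructor derives descriptorCount from whichever one of the
// three array proxies is non-empty, which is why at most one may be non-empty:
//   vk::WriteDescriptorSet write( set, /*dstBinding*/ 0, /*dstArrayElement*/ 0,
//                                 vk::DescriptorType::eUniformBuffer,
//                                 /*imageInfo*/ {}, bufferInfo );
//   device.updateDescriptorSets( write, /*descriptorCopies*/ nullptr );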
// wrapper struct for struct VkPushDescriptorSetInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPushDescriptorSetInfo.html
struct PushDescriptorSetInfo
{
using NativeType = VkPushDescriptorSetInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePushDescriptorSetInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PushDescriptorSetInfo(VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, uint32_t set_ = {}, uint32_t descriptorWriteCount_ = {}, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, stageFlags{ stageFlags_ }, layout{ layout_ }, set{ set_ }, descriptorWriteCount{ descriptorWriteCount_ }, pDescriptorWrites{ pDescriptorWrites_ }
{}
VULKAN_HPP_CONSTEXPR PushDescriptorSetInfo( PushDescriptorSetInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PushDescriptorSetInfo( VkPushDescriptorSetInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: PushDescriptorSetInfo( *reinterpret_cast<PushDescriptorSetInfo const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PushDescriptorSetInfo( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_, VULKAN_HPP_NAMESPACE::PipelineLayout layout_, uint32_t set_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites_, const void * pNext_ = nullptr )
: pNext( pNext_ ), stageFlags( stageFlags_ ), layout( layout_ ), set( set_ ), descriptorWriteCount( static_cast<uint32_t>( descriptorWrites_.size() ) ), pDescriptorWrites( descriptorWrites_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
PushDescriptorSetInfo & operator=( PushDescriptorSetInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PushDescriptorSetInfo & operator=( VkPushDescriptorSetInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PushDescriptorSetInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetInfo & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT
{
stageFlags = stageFlags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetInfo & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
{
layout = layout_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetInfo & setSet( uint32_t set_ ) VULKAN_HPP_NOEXCEPT
{
set = set_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetInfo & setDescriptorWriteCount( uint32_t descriptorWriteCount_ ) VULKAN_HPP_NOEXCEPT
{
descriptorWriteCount = descriptorWriteCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetInfo & setPDescriptorWrites( const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites_ ) VULKAN_HPP_NOEXCEPT
{
pDescriptorWrites = pDescriptorWrites_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PushDescriptorSetInfo & setDescriptorWrites( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites_ ) VULKAN_HPP_NOEXCEPT
{
descriptorWriteCount = static_cast<uint32_t>( descriptorWrites_.size() );
pDescriptorWrites = descriptorWrites_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPushDescriptorSetInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPushDescriptorSetInfo*>( this );
}
operator VkPushDescriptorSetInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPushDescriptorSetInfo*>( this );
}
operator VkPushDescriptorSetInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPushDescriptorSetInfo*>( this );
}
operator VkPushDescriptorSetInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPushDescriptorSetInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ShaderStageFlags const &, VULKAN_HPP_NAMESPACE::PipelineLayout const &, uint32_t const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, stageFlags, layout, set, descriptorWriteCount, pDescriptorWrites );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PushDescriptorSetInfo const & ) const = default;
#else
bool operator==( PushDescriptorSetInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( stageFlags == rhs.stageFlags )
&& ( layout == rhs.layout )
&& ( set == rhs.set )
&& ( descriptorWriteCount == rhs.descriptorWriteCount )
&& ( pDescriptorWrites == rhs.pDescriptorWrites );
#endif
}
bool operator!=( PushDescriptorSetInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePushDescriptorSetInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {};
VULKAN_HPP_NAMESPACE::PipelineLayout layout = {};
uint32_t set = {};
uint32_t descriptorWriteCount = {};
const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites = {};
};
template <>
struct CppType<StructureType, StructureType::ePushDescriptorSetInfo>
{
using Type = PushDescriptorSetInfo;
};
using PushDescriptorSetInfoKHR = PushDescriptorSetInfo;
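// Usage sketch (editor's illustrative comment, not generated code; assumes
// Vulkan 1.4 or VK_KHR_maintenance6 with push descriptors, a recording
// vk::CommandBuffer `cb`, a vk::PipelineLayout `layout` whose target set was
// created with the push-descriptor flag, and a prepared vk::WriteDescriptorSet
// `write` (dstSet is ignored for push descriptors); names are hypothetical):
//   vk::PushDescriptorSetInfo info( vk::ShaderStageFlagBits::eCompute, layout, /*set*/ 0, write );
//   cb.pushDescriptorSet2( info );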
// wrapper struct for struct VkPushDescriptorSetWithTemplateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPushDescriptorSetWithTemplateInfo.html
struct PushDescriptorSetWithTemplateInfo
{
using NativeType = VkPushDescriptorSetWithTemplateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePushDescriptorSetWithTemplateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PushDescriptorSetWithTemplateInfo(VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, uint32_t set_ = {}, const void * pData_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, descriptorUpdateTemplate{ descriptorUpdateTemplate_ }, layout{ layout_ }, set{ set_ }, pData{ pData_ }
{}
VULKAN_HPP_CONSTEXPR PushDescriptorSetWithTemplateInfo( PushDescriptorSetWithTemplateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PushDescriptorSetWithTemplateInfo( VkPushDescriptorSetWithTemplateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: PushDescriptorSetWithTemplateInfo( *reinterpret_cast<PushDescriptorSetWithTemplateInfo const *>( &rhs ) )
{}
PushDescriptorSetWithTemplateInfo & operator=( PushDescriptorSetWithTemplateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
PushDescriptorSetWithTemplateInfo & operator=( VkPushDescriptorSetWithTemplateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetWithTemplateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetWithTemplateInfo & setDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate_ ) VULKAN_HPP_NOEXCEPT
{
descriptorUpdateTemplate = descriptorUpdateTemplate_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetWithTemplateInfo & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
{
layout = layout_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetWithTemplateInfo & setSet( uint32_t set_ ) VULKAN_HPP_NOEXCEPT
{
set = set_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetWithTemplateInfo & setPData( const void * pData_ ) VULKAN_HPP_NOEXCEPT
{
pData = pData_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkPushDescriptorSetWithTemplateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPushDescriptorSetWithTemplateInfo*>( this );
}
operator VkPushDescriptorSetWithTemplateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPushDescriptorSetWithTemplateInfo*>( this );
}
operator VkPushDescriptorSetWithTemplateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkPushDescriptorSetWithTemplateInfo*>( this );
}
operator VkPushDescriptorSetWithTemplateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkPushDescriptorSetWithTemplateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate const &, VULKAN_HPP_NAMESPACE::PipelineLayout const &, uint32_t const &, const void * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, descriptorUpdateTemplate, layout, set, pData );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PushDescriptorSetWithTemplateInfo const & ) const = default;
#else
bool operator==( PushDescriptorSetWithTemplateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( descriptorUpdateTemplate == rhs.descriptorUpdateTemplate )
&& ( layout == rhs.layout )
&& ( set == rhs.set )
&& ( pData == rhs.pData );
#endif
}
bool operator!=( PushDescriptorSetWithTemplateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePushDescriptorSetWithTemplateInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate = {};
VULKAN_HPP_NAMESPACE::PipelineLayout layout = {};
uint32_t set = {};
const void * pData = {};
};
template <>
struct CppType<StructureType, StructureType::ePushDescriptorSetWithTemplateInfo>
{
using Type = PushDescriptorSetWithTemplateInfo;
};
using PushDescriptorSetWithTemplateInfoKHR = PushDescriptorSetWithTemplateInfo;
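// Usage sketch (editor's illustrative comment, not generated code; assumes a
// vk::DescriptorUpdateTemplate `updateTemplate` created for push descriptors, a
// compatible vk::PipelineLayout `layout`, a recording vk::CommandBuffer `cb`,
// and `data` pointing at memory laid out per the template entries; names are
// hypothetical):
//   vk::PushDescriptorSetWithTemplateInfo info( updateTemplate, layout, /*set*/ 0, data );
//   cb.pushDescriptorSetWithTemplate2( info );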
// wrapper struct for struct VkQueryLowLatencySupportNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkQueryLowLatencySupportNV.html
struct QueryLowLatencySupportNV
{
using NativeType = VkQueryLowLatencySupportNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueryLowLatencySupportNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR QueryLowLatencySupportNV(void * pQueriedLowLatencyData_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pQueriedLowLatencyData{ pQueriedLowLatencyData_ }
{}
VULKAN_HPP_CONSTEXPR QueryLowLatencySupportNV( QueryLowLatencySupportNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
QueryLowLatencySupportNV( VkQueryLowLatencySupportNV const & rhs ) VULKAN_HPP_NOEXCEPT
: QueryLowLatencySupportNV( *reinterpret_cast<QueryLowLatencySupportNV const *>( &rhs ) )
{}
QueryLowLatencySupportNV & operator=( QueryLowLatencySupportNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
QueryLowLatencySupportNV & operator=( VkQueryLowLatencySupportNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueryLowLatencySupportNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 QueryLowLatencySupportNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 QueryLowLatencySupportNV & setPQueriedLowLatencyData( void * pQueriedLowLatencyData_ ) VULKAN_HPP_NOEXCEPT
{
pQueriedLowLatencyData = pQueriedLowLatencyData_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkQueryLowLatencySupportNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkQueryLowLatencySupportNV*>( this );
}
operator VkQueryLowLatencySupportNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkQueryLowLatencySupportNV*>( this );
}
operator VkQueryLowLatencySupportNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkQueryLowLatencySupportNV*>( this );
}
operator VkQueryLowLatencySupportNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkQueryLowLatencySupportNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, void * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pQueriedLowLatencyData );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( QueryLowLatencySupportNV const & ) const = default;
#else
bool operator==( QueryLowLatencySupportNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pQueriedLowLatencyData == rhs.pQueriedLowLatencyData );
#endif
}
bool operator!=( QueryLowLatencySupportNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryLowLatencySupportNV;
const void * pNext = {};
void * pQueriedLowLatencyData = {};
};
template <>
struct CppType<StructureType, StructureType::eQueryLowLatencySupportNV>
{
using Type = QueryLowLatencySupportNV;
};
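// Usage sketch (editor's illustrative comment, not generated code): per
// VK_NV_low_latency, pQueriedLowLatencyData is used for NVIDIA Reflex support;
// see the linked registry page for where the structure may be chained. A
// minimal construction, with `storage` a hypothetical name:
//   uint32_t storage = 0;
//   auto query = vk::QueryLowLatencySupportNV{}.setPQueriedLowLatencyData( &storage );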
// wrapper struct for struct VkQueryPoolCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkQueryPoolCreateInfo.html
struct QueryPoolCreateInfo
{
using NativeType = VkQueryPoolCreateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueryPoolCreateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR QueryPoolCreateInfo(VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::QueryType queryType_ = VULKAN_HPP_NAMESPACE::QueryType::eOcclusion, uint32_t queryCount_ = {}, VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, queryType{ queryType_ }, queryCount{ queryCount_ }, pipelineStatistics{ pipelineStatistics_ }
{}
VULKAN_HPP_CONSTEXPR QueryPoolCreateInfo( QueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
QueryPoolCreateInfo( VkQueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: QueryPoolCreateInfo( *reinterpret_cast<QueryPoolCreateInfo const *>( &rhs ) )
{}
QueryPoolCreateInfo & operator=( QueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
QueryPoolCreateInfo & operator=( VkQueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo & setQueryType( VULKAN_HPP_NAMESPACE::QueryType queryType_ ) VULKAN_HPP_NOEXCEPT
{
queryType = queryType_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo & setQueryCount( uint32_t queryCount_ ) VULKAN_HPP_NOEXCEPT
{
queryCount = queryCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo & setPipelineStatistics( VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ ) VULKAN_HPP_NOEXCEPT
{
pipelineStatistics = pipelineStatistics_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkQueryPoolCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkQueryPoolCreateInfo*>( this );
}
operator VkQueryPoolCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkQueryPoolCreateInfo*>( this );
}
operator VkQueryPoolCreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkQueryPoolCreateInfo*>( this );
}
operator VkQueryPoolCreateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkQueryPoolCreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags const &, VULKAN_HPP_NAMESPACE::QueryType const &, uint32_t const &, VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, queryType, queryCount, pipelineStatistics );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( QueryPoolCreateInfo const & ) const = default;
#else
bool operator==( QueryPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( queryType == rhs.queryType )
&& ( queryCount == rhs.queryCount )
&& ( pipelineStatistics == rhs.pipelineStatistics );
#endif
}
bool operator!=( QueryPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolCreateInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags = {};
VULKAN_HPP_NAMESPACE::QueryType queryType = VULKAN_HPP_NAMESPACE::QueryType::eOcclusion;
uint32_t queryCount = {};
VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics = {};
};
template <>
struct CppType<StructureType, StructureType::eQueryPoolCreateInfo>
{
using Type = QueryPoolCreateInfo;
};
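// Usage sketch (editor's illustrative comment, not generated code; assumes a
// valid vk::Device `device`, a hypothetical name). pipelineStatistics is only
// consulted when queryType is ePipelineStatistics:
//   vk::QueryPoolCreateInfo createInfo( {}, vk::QueryType::eTimestamp, /*queryCount*/ 2 );
//   vk::QueryPool pool = device.createQueryPool( createInfo );
//   device.destroyQueryPool( pool );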
// wrapper struct for struct VkQueryPoolPerformanceCreateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkQueryPoolPerformanceCreateInfoKHR.html
struct QueryPoolPerformanceCreateInfoKHR
{
using NativeType = VkQueryPoolPerformanceCreateInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueryPoolPerformanceCreateInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR QueryPoolPerformanceCreateInfoKHR(uint32_t queueFamilyIndex_ = {}, uint32_t counterIndexCount_ = {}, const uint32_t * pCounterIndices_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, queueFamilyIndex{ queueFamilyIndex_ }, counterIndexCount{ counterIndexCount_ }, pCounterIndices{ pCounterIndices_ }
{}
VULKAN_HPP_CONSTEXPR QueryPoolPerformanceCreateInfoKHR( QueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
QueryPoolPerformanceCreateInfoKHR( VkQueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: QueryPoolPerformanceCreateInfoKHR( *reinterpret_cast<QueryPoolPerformanceCreateInfoKHR const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
QueryPoolPerformanceCreateInfoKHR( uint32_t queueFamilyIndex_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & counterIndices_, const void * pNext_ = nullptr )
: pNext( pNext_ ), queueFamilyIndex( queueFamilyIndex_ ), counterIndexCount( static_cast<uint32_t>( counterIndices_.size() ) ), pCounterIndices( counterIndices_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
QueryPoolPerformanceCreateInfoKHR & operator=( QueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
QueryPoolPerformanceCreateInfoKHR & operator=( VkQueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceCreateInfoKHR & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
{
queueFamilyIndex = queueFamilyIndex_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceCreateInfoKHR & setCounterIndexCount( uint32_t counterIndexCount_ ) VULKAN_HPP_NOEXCEPT
{
counterIndexCount = counterIndexCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceCreateInfoKHR & setPCounterIndices( const uint32_t * pCounterIndices_ ) VULKAN_HPP_NOEXCEPT
{
pCounterIndices = pCounterIndices_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
QueryPoolPerformanceCreateInfoKHR & setCounterIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & counterIndices_ ) VULKAN_HPP_NOEXCEPT
{
counterIndexCount = static_cast<uint32_t>( counterIndices_.size() );
pCounterIndices = counterIndices_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkQueryPoolPerformanceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR*>( this );
}
operator VkQueryPoolPerformanceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkQueryPoolPerformanceCreateInfoKHR*>( this );
}
operator VkQueryPoolPerformanceCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR*>( this );
}
operator VkQueryPoolPerformanceCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkQueryPoolPerformanceCreateInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, uint32_t const &, const uint32_t * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, queueFamilyIndex, counterIndexCount, pCounterIndices );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( QueryPoolPerformanceCreateInfoKHR const & ) const = default;
#else
bool operator==( QueryPoolPerformanceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( queueFamilyIndex == rhs.queueFamilyIndex )
&& ( counterIndexCount == rhs.counterIndexCount )
&& ( pCounterIndices == rhs.pCounterIndices );
#endif
}
bool operator!=( QueryPoolPerformanceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolPerformanceCreateInfoKHR;
const void * pNext = {};
uint32_t queueFamilyIndex = {};
uint32_t counterIndexCount = {};
const uint32_t * pCounterIndices = {};
};
template <>
struct CppType<StructureType, StructureType::eQueryPoolPerformanceCreateInfoKHR>
{
using Type = QueryPoolPerformanceCreateInfoKHR;
};
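// Usage sketch (editor's illustrative comment, not generated code; assumes
// VK_KHR_performance_query is enabled, `device` is a vk::Device, and
// `counterIndices` holds counter indices previously enumerated from the
// physical device; names are hypothetical). The structure extends
// QueryPoolCreateInfo when queryType is ePerformanceQueryKHR:
//   std::vector<uint32_t> counterIndices = { 0, 1 };
//   vk::QueryPoolPerformanceCreateInfoKHR perfInfo( /*queueFamilyIndex*/ 0, counterIndices );
//   vk::QueryPoolCreateInfo createInfo( {}, vk::QueryType::ePerformanceQueryKHR, /*queryCount*/ 1 );
//   createInfo.pNext = &perfInfo;
//   vk::QueryPool pool = device.createQueryPool( createInfo );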
// wrapper struct for struct VkQueryPoolPerformanceQueryCreateInfoINTEL, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkQueryPoolPerformanceQueryCreateInfoINTEL.html
struct QueryPoolPerformanceQueryCreateInfoINTEL
{
using NativeType = VkQueryPoolPerformanceQueryCreateInfoINTEL;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR QueryPoolPerformanceQueryCreateInfoINTEL(VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling_ = VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL::eManual, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, performanceCountersSampling{ performanceCountersSampling_ }
{}
VULKAN_HPP_CONSTEXPR QueryPoolPerformanceQueryCreateInfoINTEL( QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
QueryPoolPerformanceQueryCreateInfoINTEL( VkQueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
: QueryPoolPerformanceQueryCreateInfoINTEL( *reinterpret_cast<QueryPoolPerformanceQueryCreateInfoINTEL const *>( &rhs ) )
{}
QueryPoolPerformanceQueryCreateInfoINTEL & operator=( QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
QueryPoolPerformanceQueryCreateInfoINTEL & operator=( VkQueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueryPoolPerformanceQueryCreateInfoINTEL const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceQueryCreateInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceQueryCreateInfoINTEL & setPerformanceCountersSampling( VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling_ ) VULKAN_HPP_NOEXCEPT
{
performanceCountersSampling = performanceCountersSampling_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkQueryPoolPerformanceQueryCreateInfoINTEL const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkQueryPoolPerformanceQueryCreateInfoINTEL*>( this );
}
operator VkQueryPoolPerformanceQueryCreateInfoINTEL &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkQueryPoolPerformanceQueryCreateInfoINTEL*>( this );
}
operator VkQueryPoolPerformanceQueryCreateInfoINTEL const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkQueryPoolPerformanceQueryCreateInfoINTEL*>( this );
}
operator VkQueryPoolPerformanceQueryCreateInfoINTEL *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkQueryPoolPerformanceQueryCreateInfoINTEL*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, performanceCountersSampling );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( QueryPoolPerformanceQueryCreateInfoINTEL const & ) const = default;
#else
bool operator==( QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( performanceCountersSampling == rhs.performanceCountersSampling );
#endif
}
bool operator!=( QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling = VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL::eManual;
};
template <>
struct CppType<StructureType, StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL>
{
using Type = QueryPoolPerformanceQueryCreateInfoINTEL;
};
using QueryPoolCreateInfoINTEL = QueryPoolPerformanceQueryCreateInfoINTEL;
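// Usage sketch (editor's illustrative comment, not generated code; assumes
// VK_INTEL_performance_query and a vk::Device `device`, a hypothetical name).
// The structure extends QueryPoolCreateInfo when queryType is
// ePerformanceQueryINTEL:
//   vk::QueryPoolPerformanceQueryCreateInfoINTEL intelInfo( vk::QueryPoolSamplingModeINTEL::eManual );
//   vk::QueryPoolCreateInfo createInfo( {}, vk::QueryType::ePerformanceQueryINTEL, /*queryCount*/ 1 );
//   createInfo.pNext = &intelInfo;
//   vk::QueryPool pool = device.createQueryPool( createInfo );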
// wrapper struct for struct VkQueryPoolVideoEncodeFeedbackCreateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkQueryPoolVideoEncodeFeedbackCreateInfoKHR.html
struct QueryPoolVideoEncodeFeedbackCreateInfoKHR
{
using NativeType = VkQueryPoolVideoEncodeFeedbackCreateInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueryPoolVideoEncodeFeedbackCreateInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR QueryPoolVideoEncodeFeedbackCreateInfoKHR(VULKAN_HPP_NAMESPACE::VideoEncodeFeedbackFlagsKHR encodeFeedbackFlags_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, encodeFeedbackFlags{ encodeFeedbackFlags_ }
{}
VULKAN_HPP_CONSTEXPR QueryPoolVideoEncodeFeedbackCreateInfoKHR( QueryPoolVideoEncodeFeedbackCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
QueryPoolVideoEncodeFeedbackCreateInfoKHR( VkQueryPoolVideoEncodeFeedbackCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: QueryPoolVideoEncodeFeedbackCreateInfoKHR( *reinterpret_cast<QueryPoolVideoEncodeFeedbackCreateInfoKHR const *>( &rhs ) )
{}
QueryPoolVideoEncodeFeedbackCreateInfoKHR & operator=( QueryPoolVideoEncodeFeedbackCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
QueryPoolVideoEncodeFeedbackCreateInfoKHR & operator=( VkQueryPoolVideoEncodeFeedbackCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueryPoolVideoEncodeFeedbackCreateInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 QueryPoolVideoEncodeFeedbackCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 QueryPoolVideoEncodeFeedbackCreateInfoKHR & setEncodeFeedbackFlags( VULKAN_HPP_NAMESPACE::VideoEncodeFeedbackFlagsKHR encodeFeedbackFlags_ ) VULKAN_HPP_NOEXCEPT
{
encodeFeedbackFlags = encodeFeedbackFlags_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkQueryPoolVideoEncodeFeedbackCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkQueryPoolVideoEncodeFeedbackCreateInfoKHR*>( this );
}
operator VkQueryPoolVideoEncodeFeedbackCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkQueryPoolVideoEncodeFeedbackCreateInfoKHR*>( this );
}
operator VkQueryPoolVideoEncodeFeedbackCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkQueryPoolVideoEncodeFeedbackCreateInfoKHR*>( this );
}
operator VkQueryPoolVideoEncodeFeedbackCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkQueryPoolVideoEncodeFeedbackCreateInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::VideoEncodeFeedbackFlagsKHR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, encodeFeedbackFlags );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( QueryPoolVideoEncodeFeedbackCreateInfoKHR const & ) const = default;
#else
bool operator==( QueryPoolVideoEncodeFeedbackCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( encodeFeedbackFlags == rhs.encodeFeedbackFlags );
#endif
}
bool operator!=( QueryPoolVideoEncodeFeedbackCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolVideoEncodeFeedbackCreateInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::VideoEncodeFeedbackFlagsKHR encodeFeedbackFlags = {};
};
template <>
struct CppType<StructureType, StructureType::eQueryPoolVideoEncodeFeedbackCreateInfoKHR>
{
using Type = QueryPoolVideoEncodeFeedbackCreateInfoKHR;
};
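// Usage sketch (editor's illustrative comment, not generated code; assumes
// VK_KHR_video_encode_queue and a vk::Device `device`, a hypothetical name).
// The structure extends QueryPoolCreateInfo when queryType is
// eVideoEncodeFeedbackKHR; note the spec also expects a VideoProfileInfoKHR in
// the same pNext chain:
//   vk::QueryPoolVideoEncodeFeedbackCreateInfoKHR feedbackInfo(
//     vk::VideoEncodeFeedbackFlagBitsKHR::eBitstreamBufferOffset | vk::VideoEncodeFeedbackFlagBitsKHR::eBitstreamBytesWritten );
//   vk::QueryPoolCreateInfo createInfo( {}, vk::QueryType::eVideoEncodeFeedbackKHR, /*queryCount*/ 1 );
//   createInfo.pNext = &feedbackInfo;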
// wrapper struct for struct VkQueueFamilyCheckpointProperties2NV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkQueueFamilyCheckpointProperties2NV.html
struct QueueFamilyCheckpointProperties2NV
{
using NativeType = VkQueueFamilyCheckpointProperties2NV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyCheckpointProperties2NV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointProperties2NV(VULKAN_HPP_NAMESPACE::PipelineStageFlags2 checkpointExecutionStageMask_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, checkpointExecutionStageMask{ checkpointExecutionStageMask_ }
{}
VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointProperties2NV( QueueFamilyCheckpointProperties2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
QueueFamilyCheckpointProperties2NV( VkQueueFamilyCheckpointProperties2NV const & rhs ) VULKAN_HPP_NOEXCEPT
: QueueFamilyCheckpointProperties2NV( *reinterpret_cast<QueueFamilyCheckpointProperties2NV const *>( &rhs ) )
{}
QueueFamilyCheckpointProperties2NV & operator=( QueueFamilyCheckpointProperties2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
QueueFamilyCheckpointProperties2NV & operator=( VkQueueFamilyCheckpointProperties2NV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointProperties2NV const *>( &rhs );
return *this;
}
operator VkQueueFamilyCheckpointProperties2NV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkQueueFamilyCheckpointProperties2NV*>( this );
}
operator VkQueueFamilyCheckpointProperties2NV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkQueueFamilyCheckpointProperties2NV*>( this );
}
operator VkQueueFamilyCheckpointProperties2NV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkQueueFamilyCheckpointProperties2NV*>( this );
}
operator VkQueueFamilyCheckpointProperties2NV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkQueueFamilyCheckpointProperties2NV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, checkpointExecutionStageMask );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( QueueFamilyCheckpointProperties2NV const & ) const = default;
#else
bool operator==( QueueFamilyCheckpointProperties2NV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( checkpointExecutionStageMask == rhs.checkpointExecutionStageMask );
#endif
}
bool operator!=( QueueFamilyCheckpointProperties2NV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyCheckpointProperties2NV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::PipelineStageFlags2 checkpointExecutionStageMask = {};
};
template <>
struct CppType<StructureType, StructureType::eQueueFamilyCheckpointProperties2NV>
{
using Type = QueueFamilyCheckpointProperties2NV;
};
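// Usage sketch (editor's illustrative comment, not generated code; assumes
// VK_NV_device_diagnostic_checkpoints with synchronization2 and a
// vk::PhysicalDevice `physicalDevice`, a hypothetical name). This is an output
// structure, read back through a structure-chain query; the non-"2" variant
// below follows the same pattern with PipelineStageFlags:
//   auto chains = physicalDevice.getQueueFamilyProperties2<
//     vk::StructureChain<vk::QueueFamilyProperties2, vk::QueueFamilyCheckpointProperties2NV>>();
//   vk::PipelineStageFlags2 stages =
//     chains[0].get<vk::QueueFamilyCheckpointProperties2NV>().checkpointExecutionStageMask;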
// wrapper struct for struct VkQueueFamilyCheckpointPropertiesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkQueueFamilyCheckpointPropertiesNV.html
struct QueueFamilyCheckpointPropertiesNV
{
using NativeType = VkQueueFamilyCheckpointPropertiesNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyCheckpointPropertiesNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointPropertiesNV(VULKAN_HPP_NAMESPACE::PipelineStageFlags checkpointExecutionStageMask_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, checkpointExecutionStageMask{ checkpointExecutionStageMask_ }
{}
VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointPropertiesNV( QueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
QueueFamilyCheckpointPropertiesNV( VkQueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: QueueFamilyCheckpointPropertiesNV( *reinterpret_cast<QueueFamilyCheckpointPropertiesNV const *>( &rhs ) )
{}
QueueFamilyCheckpointPropertiesNV & operator=( QueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
QueueFamilyCheckpointPropertiesNV & operator=( VkQueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV const *>( &rhs );
return *this;
}
operator VkQueueFamilyCheckpointPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkQueueFamilyCheckpointPropertiesNV*>( this );
}
operator VkQueueFamilyCheckpointPropertiesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkQueueFamilyCheckpointPropertiesNV*>( this );
}
operator VkQueueFamilyCheckpointPropertiesNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkQueueFamilyCheckpointPropertiesNV*>( this );
}
operator VkQueueFamilyCheckpointPropertiesNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkQueueFamilyCheckpointPropertiesNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PipelineStageFlags const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, checkpointExecutionStageMask );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( QueueFamilyCheckpointPropertiesNV const & ) const = default;
#else
bool operator==( QueueFamilyCheckpointPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( checkpointExecutionStageMask == rhs.checkpointExecutionStageMask );
#endif
}
bool operator!=( QueueFamilyCheckpointPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyCheckpointPropertiesNV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::PipelineStageFlags checkpointExecutionStageMask = {};
};
template <>
struct CppType<StructureType, StructureType::eQueueFamilyCheckpointPropertiesNV>
{
using Type = QueueFamilyCheckpointPropertiesNV;
};
// wrapper struct for struct VkQueueFamilyGlobalPriorityProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkQueueFamilyGlobalPriorityProperties.html
struct QueueFamilyGlobalPriorityProperties
{
using NativeType = VkQueueFamilyGlobalPriorityProperties;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyGlobalPriorityProperties;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 QueueFamilyGlobalPriorityProperties(uint32_t priorityCount_ = {}, std::array<VULKAN_HPP_NAMESPACE::QueueGlobalPriority,VK_MAX_GLOBAL_PRIORITY_SIZE> const & priorities_ = { { VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow } }, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, priorityCount{ priorityCount_ }, priorities{ priorities_ }
{}
VULKAN_HPP_CONSTEXPR_14 QueueFamilyGlobalPriorityProperties( QueueFamilyGlobalPriorityProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
QueueFamilyGlobalPriorityProperties( VkQueueFamilyGlobalPriorityProperties const & rhs ) VULKAN_HPP_NOEXCEPT
: QueueFamilyGlobalPriorityProperties( *reinterpret_cast<QueueFamilyGlobalPriorityProperties const *>( &rhs ) )
{}
QueueFamilyGlobalPriorityProperties & operator=( QueueFamilyGlobalPriorityProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
QueueFamilyGlobalPriorityProperties & operator=( VkQueueFamilyGlobalPriorityProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityProperties const *>( &rhs );
return *this;
}
operator VkQueueFamilyGlobalPriorityProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkQueueFamilyGlobalPriorityProperties*>( this );
}
operator VkQueueFamilyGlobalPriorityProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkQueueFamilyGlobalPriorityProperties*>( this );
}
operator VkQueueFamilyGlobalPriorityProperties const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkQueueFamilyGlobalPriorityProperties*>( this );
}
operator VkQueueFamilyGlobalPriorityProperties *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkQueueFamilyGlobalPriorityProperties*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::QueueGlobalPriority, VK_MAX_GLOBAL_PRIORITY_SIZE> const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, priorityCount, priorities );
}
#endif
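// Note: only the first priorityCount entries of priorities carry meaning, so the
// comparison operators below compare element-wise up to priorityCount instead of
// using the defaulted member-wise operators.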
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
std::strong_ordering operator<=>( QueueFamilyGlobalPriorityProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
if ( auto cmp = priorityCount <=> rhs.priorityCount; cmp != 0 ) return cmp;
for ( size_t i = 0; i < priorityCount; ++i )
{
if ( auto cmp = priorities[i] <=> rhs.priorities[i]; cmp != 0 ) return cmp;
}
return std::strong_ordering::equivalent;
}
#endif
bool operator==( QueueFamilyGlobalPriorityProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( priorityCount == rhs.priorityCount )
&& ( memcmp( priorities, rhs.priorities, priorityCount * sizeof( VULKAN_HPP_NAMESPACE::QueueGlobalPriority ) ) == 0 );
}
bool operator!=( QueueFamilyGlobalPriorityProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyGlobalPriorityProperties;
void * pNext = {};
uint32_t priorityCount = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::QueueGlobalPriority, VK_MAX_GLOBAL_PRIORITY_SIZE> priorities = {};
};
template <>
struct CppType<StructureType, StructureType::eQueueFamilyGlobalPriorityProperties>
{
using Type = QueueFamilyGlobalPriorityProperties;
};
using QueueFamilyGlobalPriorityPropertiesEXT = QueueFamilyGlobalPriorityProperties;
using QueueFamilyGlobalPriorityPropertiesKHR = QueueFamilyGlobalPriorityProperties;
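// Usage sketch (illustrative only): enumerate the global priorities a queue
// family supports. Assumes the default `vk` namespace, a valid
// vk::PhysicalDevice `physicalDevice`, and global-priority support (e.g. via
// VK_KHR_global_priority):
//   auto chains = physicalDevice.getQueueFamilyProperties2<
//     vk::StructureChain<vk::QueueFamilyProperties2, vk::QueueFamilyGlobalPriorityProperties>>();
//   auto const & gp = chains[0].get<vk::QueueFamilyGlobalPriorityProperties>();
//   for ( uint32_t i = 0; i < gp.priorityCount; ++i )
//   {
//     vk::QueueGlobalPriority supported = gp.priorities[i];
//   }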
// wrapper struct for struct VkQueueFamilyOwnershipTransferPropertiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkQueueFamilyOwnershipTransferPropertiesKHR.html
struct QueueFamilyOwnershipTransferPropertiesKHR
{
using NativeType = VkQueueFamilyOwnershipTransferPropertiesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyOwnershipTransferPropertiesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR QueueFamilyOwnershipTransferPropertiesKHR(uint32_t optimalImageTransferToQueueFamilies_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, optimalImageTransferToQueueFamilies{ optimalImageTransferToQueueFamilies_ }
{}
VULKAN_HPP_CONSTEXPR QueueFamilyOwnershipTransferPropertiesKHR( QueueFamilyOwnershipTransferPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
QueueFamilyOwnershipTransferPropertiesKHR( VkQueueFamilyOwnershipTransferPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: QueueFamilyOwnershipTransferPropertiesKHR( *reinterpret_cast<QueueFamilyOwnershipTransferPropertiesKHR const *>( &rhs ) )
{}
QueueFamilyOwnershipTransferPropertiesKHR & operator=( QueueFamilyOwnershipTransferPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
QueueFamilyOwnershipTransferPropertiesKHR & operator=( VkQueueFamilyOwnershipTransferPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyOwnershipTransferPropertiesKHR const *>( &rhs );
return *this;
}
operator VkQueueFamilyOwnershipTransferPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkQueueFamilyOwnershipTransferPropertiesKHR*>( this );
}
operator VkQueueFamilyOwnershipTransferPropertiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkQueueFamilyOwnershipTransferPropertiesKHR*>( this );
}
operator VkQueueFamilyOwnershipTransferPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkQueueFamilyOwnershipTransferPropertiesKHR*>( this );
}
operator VkQueueFamilyOwnershipTransferPropertiesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkQueueFamilyOwnershipTransferPropertiesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, optimalImageTransferToQueueFamilies );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( QueueFamilyOwnershipTransferPropertiesKHR const & ) const = default;
#else
bool operator==( QueueFamilyOwnershipTransferPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( optimalImageTransferToQueueFamilies == rhs.optimalImageTransferToQueueFamilies );
#endif
}
bool operator!=( QueueFamilyOwnershipTransferPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyOwnershipTransferPropertiesKHR;
void * pNext = {};
uint32_t optimalImageTransferToQueueFamilies = {};
};
template <>
struct CppType<StructureType, StructureType::eQueueFamilyOwnershipTransferPropertiesKHR>
{
using Type = QueueFamilyOwnershipTransferPropertiesKHR;
};
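// Usage sketch (illustrative only): read the image ownership-transfer hint for
// a queue family; optimalImageTransferToQueueFamilies is a bitmask of queue
// family indices (see the linked registry page for the exact semantics).
// Assumes the default `vk` namespace and a valid vk::PhysicalDevice
// `physicalDevice`:
//   auto chains = physicalDevice.getQueueFamilyProperties2<
//     vk::StructureChain<vk::QueueFamilyProperties2, vk::QueueFamilyOwnershipTransferPropertiesKHR>>();
//   uint32_t mask = chains[0].get<vk::QueueFamilyOwnershipTransferPropertiesKHR>()
//                            .optimalImageTransferToQueueFamilies;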
// wrapper struct for struct VkQueueFamilyProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkQueueFamilyProperties.html
struct QueueFamilyProperties
{
using NativeType = VkQueueFamilyProperties;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR QueueFamilyProperties(VULKAN_HPP_NAMESPACE::QueueFlags queueFlags_ = {}, uint32_t queueCount_ = {}, uint32_t timestampValidBits_ = {}, VULKAN_HPP_NAMESPACE::Extent3D minImageTransferGranularity_ = {}) VULKAN_HPP_NOEXCEPT
: queueFlags{ queueFlags_ }, queueCount{ queueCount_ }, timestampValidBits{ timestampValidBits_ }, minImageTransferGranularity{ minImageTransferGranularity_ }
{}
VULKAN_HPP_CONSTEXPR QueueFamilyProperties( QueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
QueueFamilyProperties( VkQueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT
: QueueFamilyProperties( *reinterpret_cast<QueueFamilyProperties const *>( &rhs ) )
{}
QueueFamilyProperties & operator=( QueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
QueueFamilyProperties & operator=( VkQueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyProperties const *>( &rhs );
return *this;
}
operator VkQueueFamilyProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkQueueFamilyProperties*>( this );
}
operator VkQueueFamilyProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkQueueFamilyProperties*>( this );
}
operator VkQueueFamilyProperties const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkQueueFamilyProperties*>( this );
}
operator VkQueueFamilyProperties *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkQueueFamilyProperties*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::QueueFlags const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Extent3D const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( queueFlags, queueCount, timestampValidBits, minImageTransferGranularity );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( QueueFamilyProperties const & ) const = default;
#else
bool operator==( QueueFamilyProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( queueFlags == rhs.queueFlags )
&& ( queueCount == rhs.queueCount )
&& ( timestampValidBits == rhs.timestampValidBits )
&& ( minImageTransferGranularity == rhs.minImageTransferGranularity );
#endif
}
bool operator!=( QueueFamilyProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::QueueFlags queueFlags = {};
uint32_t queueCount = {};
uint32_t timestampValidBits = {};
VULKAN_HPP_NAMESPACE::Extent3D minImageTransferGranularity = {};
};
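// Usage sketch (illustrative only): pick the first graphics-capable queue
// family. Assumes the default `vk` namespace and a valid vk::PhysicalDevice
// `physicalDevice`:
//   std::vector<vk::QueueFamilyProperties> props = physicalDevice.getQueueFamilyProperties();
//   for ( uint32_t i = 0; i < static_cast<uint32_t>( props.size() ); ++i )
//     if ( props[i].queueFlags & vk::QueueFlagBits::eGraphics )
//     {
//       /* queue family i supports graphics */
//       break;
//     }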
// wrapper struct for struct VkQueueFamilyProperties2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkQueueFamilyProperties2.html
struct QueueFamilyProperties2
{
using NativeType = VkQueueFamilyProperties2;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyProperties2;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR QueueFamilyProperties2(VULKAN_HPP_NAMESPACE::QueueFamilyProperties queueFamilyProperties_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, queueFamilyProperties{ queueFamilyProperties_ }
{}
VULKAN_HPP_CONSTEXPR QueueFamilyProperties2( QueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
QueueFamilyProperties2( VkQueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
: QueueFamilyProperties2( *reinterpret_cast<QueueFamilyProperties2 const *>( &rhs ) )
{}
QueueFamilyProperties2 & operator=( QueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
QueueFamilyProperties2 & operator=( VkQueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 const *>( &rhs );
return *this;
}
operator VkQueueFamilyProperties2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkQueueFamilyProperties2*>( this );
}
operator VkQueueFamilyProperties2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkQueueFamilyProperties2*>( this );
}
operator VkQueueFamilyProperties2 const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkQueueFamilyProperties2*>( this );
}
operator VkQueueFamilyProperties2 *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkQueueFamilyProperties2*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::QueueFamilyProperties const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, queueFamilyProperties );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( QueueFamilyProperties2 const & ) const = default;
#else
bool operator==( QueueFamilyProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( queueFamilyProperties == rhs.queueFamilyProperties );
#endif
}
bool operator!=( QueueFamilyProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyProperties2;
void * pNext = {};
VULKAN_HPP_NAMESPACE::QueueFamilyProperties queueFamilyProperties = {};
};
template <>
struct CppType<StructureType, StructureType::eQueueFamilyProperties2>
{
using Type = QueueFamilyProperties2;
};
using QueueFamilyProperties2KHR = QueueFamilyProperties2;
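// Usage sketch (illustrative only): the *2 query yields the same core data as
// getQueueFamilyProperties, wrapped so that extension structs can be chained
// on pNext. Assumes a valid vk::PhysicalDevice `physicalDevice`:
//   std::vector<vk::QueueFamilyProperties2> props2 = physicalDevice.getQueueFamilyProperties2();
//   uint32_t queueCount = props2[0].queueFamilyProperties.queueCount;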
// wrapper struct for struct VkQueueFamilyQueryResultStatusPropertiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkQueueFamilyQueryResultStatusPropertiesKHR.html
struct QueueFamilyQueryResultStatusPropertiesKHR
{
using NativeType = VkQueueFamilyQueryResultStatusPropertiesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyQueryResultStatusPropertiesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR QueueFamilyQueryResultStatusPropertiesKHR(VULKAN_HPP_NAMESPACE::Bool32 queryResultStatusSupport_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, queryResultStatusSupport{ queryResultStatusSupport_ }
{}
VULKAN_HPP_CONSTEXPR QueueFamilyQueryResultStatusPropertiesKHR( QueueFamilyQueryResultStatusPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
QueueFamilyQueryResultStatusPropertiesKHR( VkQueueFamilyQueryResultStatusPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: QueueFamilyQueryResultStatusPropertiesKHR( *reinterpret_cast<QueueFamilyQueryResultStatusPropertiesKHR const *>( &rhs ) )
{}
QueueFamilyQueryResultStatusPropertiesKHR & operator=( QueueFamilyQueryResultStatusPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
QueueFamilyQueryResultStatusPropertiesKHR & operator=( VkQueueFamilyQueryResultStatusPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyQueryResultStatusPropertiesKHR const *>( &rhs );
return *this;
}
operator VkQueueFamilyQueryResultStatusPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkQueueFamilyQueryResultStatusPropertiesKHR*>( this );
}
operator VkQueueFamilyQueryResultStatusPropertiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkQueueFamilyQueryResultStatusPropertiesKHR*>( this );
}
operator VkQueueFamilyQueryResultStatusPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkQueueFamilyQueryResultStatusPropertiesKHR*>( this );
}
operator VkQueueFamilyQueryResultStatusPropertiesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkQueueFamilyQueryResultStatusPropertiesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, queryResultStatusSupport );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( QueueFamilyQueryResultStatusPropertiesKHR const & ) const = default;
#else
bool operator==( QueueFamilyQueryResultStatusPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( queryResultStatusSupport == rhs.queryResultStatusSupport );
#endif
}
bool operator!=( QueueFamilyQueryResultStatusPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyQueryResultStatusPropertiesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 queryResultStatusSupport = {};
};
template <>
struct CppType<StructureType, StructureType::eQueueFamilyQueryResultStatusPropertiesKHR>
{
using Type = QueueFamilyQueryResultStatusPropertiesKHR;
};
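// Usage sketch (illustrative only): check whether a video queue family can
// report query result status values. Assumes the default `vk` namespace, a
// valid vk::PhysicalDevice `physicalDevice`, and VK_KHR_video_queue support:
//   auto chains = physicalDevice.getQueueFamilyProperties2<
//     vk::StructureChain<vk::QueueFamilyProperties2, vk::QueueFamilyQueryResultStatusPropertiesKHR>>();
//   bool supportsResultStatus =
//     chains[0].get<vk::QueueFamilyQueryResultStatusPropertiesKHR>().queryResultStatusSupport == vk::True;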
// wrapper struct for struct VkQueueFamilyVideoPropertiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkQueueFamilyVideoPropertiesKHR.html
struct QueueFamilyVideoPropertiesKHR
{
using NativeType = VkQueueFamilyVideoPropertiesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyVideoPropertiesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR QueueFamilyVideoPropertiesKHR(VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagsKHR videoCodecOperations_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, videoCodecOperations{ videoCodecOperations_ }
{}
VULKAN_HPP_CONSTEXPR QueueFamilyVideoPropertiesKHR( QueueFamilyVideoPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
QueueFamilyVideoPropertiesKHR( VkQueueFamilyVideoPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: QueueFamilyVideoPropertiesKHR( *reinterpret_cast<QueueFamilyVideoPropertiesKHR const *>( &rhs ) )
{}
QueueFamilyVideoPropertiesKHR & operator=( QueueFamilyVideoPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
QueueFamilyVideoPropertiesKHR & operator=( VkQueueFamilyVideoPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyVideoPropertiesKHR const *>( &rhs );
return *this;
}
operator VkQueueFamilyVideoPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkQueueFamilyVideoPropertiesKHR*>( this );
}
operator VkQueueFamilyVideoPropertiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkQueueFamilyVideoPropertiesKHR*>( this );
}
operator VkQueueFamilyVideoPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkQueueFamilyVideoPropertiesKHR*>( this );
}
operator VkQueueFamilyVideoPropertiesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkQueueFamilyVideoPropertiesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagsKHR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, videoCodecOperations );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( QueueFamilyVideoPropertiesKHR const & ) const = default;
#else
bool operator==( QueueFamilyVideoPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( videoCodecOperations == rhs.videoCodecOperations );
#endif
}
bool operator!=( QueueFamilyVideoPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyVideoPropertiesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagsKHR videoCodecOperations = {};
};
template <>
struct CppType<StructureType, StructureType::eQueueFamilyVideoPropertiesKHR>
{
using Type = QueueFamilyVideoPropertiesKHR;
};
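// Usage sketch (illustrative only): find a queue family that supports a given
// video codec operation. Assumes the default `vk` namespace, a valid
// vk::PhysicalDevice `physicalDevice`, and VK_KHR_video_queue support:
//   auto chains = physicalDevice.getQueueFamilyProperties2<
//     vk::StructureChain<vk::QueueFamilyProperties2, vk::QueueFamilyVideoPropertiesKHR>>();
//   bool canDecodeH264 = static_cast<bool>(
//     chains[0].get<vk::QueueFamilyVideoPropertiesKHR>().videoCodecOperations
//     & vk::VideoCodecOperationFlagBitsKHR::eDecodeH264 );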
// wrapper struct for struct VkRayTracingPipelineClusterAccelerationStructureCreateInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRayTracingPipelineClusterAccelerationStructureCreateInfoNV.html
struct RayTracingPipelineClusterAccelerationStructureCreateInfoNV
{
using NativeType = VkRayTracingPipelineClusterAccelerationStructureCreateInfoNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingPipelineClusterAccelerationStructureCreateInfoNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR RayTracingPipelineClusterAccelerationStructureCreateInfoNV(VULKAN_HPP_NAMESPACE::Bool32 allowClusterAccelerationStructure_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, allowClusterAccelerationStructure{ allowClusterAccelerationStructure_ }
{}
VULKAN_HPP_CONSTEXPR RayTracingPipelineClusterAccelerationStructureCreateInfoNV( RayTracingPipelineClusterAccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
RayTracingPipelineClusterAccelerationStructureCreateInfoNV( VkRayTracingPipelineClusterAccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: RayTracingPipelineClusterAccelerationStructureCreateInfoNV( *reinterpret_cast<RayTracingPipelineClusterAccelerationStructureCreateInfoNV const *>( &rhs ) )
{}
RayTracingPipelineClusterAccelerationStructureCreateInfoNV & operator=( RayTracingPipelineClusterAccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
RayTracingPipelineClusterAccelerationStructureCreateInfoNV & operator=( VkRayTracingPipelineClusterAccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RayTracingPipelineClusterAccelerationStructureCreateInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineClusterAccelerationStructureCreateInfoNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineClusterAccelerationStructureCreateInfoNV & setAllowClusterAccelerationStructure( VULKAN_HPP_NAMESPACE::Bool32 allowClusterAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT
{
allowClusterAccelerationStructure = allowClusterAccelerationStructure_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkRayTracingPipelineClusterAccelerationStructureCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRayTracingPipelineClusterAccelerationStructureCreateInfoNV*>( this );
}
operator VkRayTracingPipelineClusterAccelerationStructureCreateInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkRayTracingPipelineClusterAccelerationStructureCreateInfoNV*>( this );
}
operator VkRayTracingPipelineClusterAccelerationStructureCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkRayTracingPipelineClusterAccelerationStructureCreateInfoNV*>( this );
}
operator VkRayTracingPipelineClusterAccelerationStructureCreateInfoNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkRayTracingPipelineClusterAccelerationStructureCreateInfoNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, allowClusterAccelerationStructure );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( RayTracingPipelineClusterAccelerationStructureCreateInfoNV const & ) const = default;
#else
bool operator==( RayTracingPipelineClusterAccelerationStructureCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( allowClusterAccelerationStructure == rhs.allowClusterAccelerationStructure );
#endif
}
bool operator!=( RayTracingPipelineClusterAccelerationStructureCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineClusterAccelerationStructureCreateInfoNV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 allowClusterAccelerationStructure = {};
};
template <>
struct CppType<StructureType, StructureType::eRayTracingPipelineClusterAccelerationStructureCreateInfoNV>
{
using Type = RayTracingPipelineClusterAccelerationStructureCreateInfoNV;
};
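// Usage sketch (illustrative only): opt a ray tracing pipeline into cluster
// acceleration structures by chaining this struct into the pipeline create
// info. `rtCreateInfo` is a placeholder vk::RayTracingPipelineCreateInfoKHR;
// assumes VK_NV_cluster_acceleration_structure support:
//   vk::RayTracingPipelineClusterAccelerationStructureCreateInfoNV clusterInfo{};
//   clusterInfo.setAllowClusterAccelerationStructure( vk::True );
//   rtCreateInfo.setPNext( &clusterInfo );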
// wrapper struct for struct VkRayTracingShaderGroupCreateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRayTracingShaderGroupCreateInfoKHR.html
struct RayTracingShaderGroupCreateInfoKHR
{
using NativeType = VkRayTracingShaderGroupCreateInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingShaderGroupCreateInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoKHR(VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral, uint32_t generalShader_ = VULKAN_HPP_NAMESPACE::ShaderUnusedKHR, uint32_t closestHitShader_ = VULKAN_HPP_NAMESPACE::ShaderUnusedKHR, uint32_t anyHitShader_ = VULKAN_HPP_NAMESPACE::ShaderUnusedKHR, uint32_t intersectionShader_ = VULKAN_HPP_NAMESPACE::ShaderUnusedKHR, const void * pShaderGroupCaptureReplayHandle_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, type{ type_ }, generalShader{ generalShader_ }, closestHitShader{ closestHitShader_ }, anyHitShader{ anyHitShader_ }, intersectionShader{ intersectionShader_ }, pShaderGroupCaptureReplayHandle{ pShaderGroupCaptureReplayHandle_ }
{}
VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoKHR( RayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
RayTracingShaderGroupCreateInfoKHR( VkRayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: RayTracingShaderGroupCreateInfoKHR( *reinterpret_cast<RayTracingShaderGroupCreateInfoKHR const *>( &rhs ) )
{}
RayTracingShaderGroupCreateInfoKHR & operator=( RayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
RayTracingShaderGroupCreateInfoKHR & operator=( VkRayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & setType( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ ) VULKAN_HPP_NOEXCEPT
{
type = type_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & setGeneralShader( uint32_t generalShader_ ) VULKAN_HPP_NOEXCEPT
{
generalShader = generalShader_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & setClosestHitShader( uint32_t closestHitShader_ ) VULKAN_HPP_NOEXCEPT
{
closestHitShader = closestHitShader_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & setAnyHitShader( uint32_t anyHitShader_ ) VULKAN_HPP_NOEXCEPT
{
anyHitShader = anyHitShader_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & setIntersectionShader( uint32_t intersectionShader_ ) VULKAN_HPP_NOEXCEPT
{
intersectionShader = intersectionShader_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & setPShaderGroupCaptureReplayHandle( const void * pShaderGroupCaptureReplayHandle_ ) VULKAN_HPP_NOEXCEPT
{
pShaderGroupCaptureReplayHandle = pShaderGroupCaptureReplayHandle_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkRayTracingShaderGroupCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRayTracingShaderGroupCreateInfoKHR*>( this );
}
operator VkRayTracingShaderGroupCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkRayTracingShaderGroupCreateInfoKHR*>( this );
}
operator VkRayTracingShaderGroupCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkRayTracingShaderGroupCreateInfoKHR*>( this );
}
operator VkRayTracingShaderGroupCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkRayTracingShaderGroupCreateInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, const void * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, type, generalShader, closestHitShader, anyHitShader, intersectionShader, pShaderGroupCaptureReplayHandle );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( RayTracingShaderGroupCreateInfoKHR const & ) const = default;
#else
bool operator==( RayTracingShaderGroupCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( type == rhs.type )
&& ( generalShader == rhs.generalShader )
&& ( closestHitShader == rhs.closestHitShader )
&& ( anyHitShader == rhs.anyHitShader )
&& ( intersectionShader == rhs.intersectionShader )
&& ( pShaderGroupCaptureReplayHandle == rhs.pShaderGroupCaptureReplayHandle );
#endif
}
bool operator!=( RayTracingShaderGroupCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingShaderGroupCreateInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral;
uint32_t generalShader = VULKAN_HPP_NAMESPACE::ShaderUnusedKHR;
uint32_t closestHitShader = VULKAN_HPP_NAMESPACE::ShaderUnusedKHR;
uint32_t anyHitShader = VULKAN_HPP_NAMESPACE::ShaderUnusedKHR;
uint32_t intersectionShader = VULKAN_HPP_NAMESPACE::ShaderUnusedKHR;
const void * pShaderGroupCaptureReplayHandle = {};
};
template <>
struct CppType<StructureType, StructureType::eRayTracingShaderGroupCreateInfoKHR>
{
using Type = RayTracingShaderGroupCreateInfoKHR;
};
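// Usage sketch (illustrative only): describe a triangles hit group whose
// closest-hit shader is at index 2 of the pipeline's pStages array (the index
// is a placeholder); generalShader, anyHitShader and intersectionShader keep
// their vk::ShaderUnusedKHR defaults:
//   auto hitGroup = vk::RayTracingShaderGroupCreateInfoKHR{}
//                     .setType( vk::RayTracingShaderGroupTypeKHR::eTrianglesHitGroup )
//                     .setClosestHitShader( 2 );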
// wrapper struct for struct VkRayTracingPipelineInterfaceCreateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRayTracingPipelineInterfaceCreateInfoKHR.html
struct RayTracingPipelineInterfaceCreateInfoKHR
{
using NativeType = VkRayTracingPipelineInterfaceCreateInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingPipelineInterfaceCreateInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR RayTracingPipelineInterfaceCreateInfoKHR(uint32_t maxPipelineRayPayloadSize_ = {}, uint32_t maxPipelineRayHitAttributeSize_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, maxPipelineRayPayloadSize{ maxPipelineRayPayloadSize_ }, maxPipelineRayHitAttributeSize{ maxPipelineRayHitAttributeSize_ }
{}
VULKAN_HPP_CONSTEXPR RayTracingPipelineInterfaceCreateInfoKHR( RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
RayTracingPipelineInterfaceCreateInfoKHR( VkRayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: RayTracingPipelineInterfaceCreateInfoKHR( *reinterpret_cast<RayTracingPipelineInterfaceCreateInfoKHR const *>( &rhs ) )
{}
RayTracingPipelineInterfaceCreateInfoKHR & operator=( RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
RayTracingPipelineInterfaceCreateInfoKHR & operator=( VkRayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineInterfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineInterfaceCreateInfoKHR & setMaxPipelineRayPayloadSize( uint32_t maxPipelineRayPayloadSize_ ) VULKAN_HPP_NOEXCEPT
{
maxPipelineRayPayloadSize = maxPipelineRayPayloadSize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineInterfaceCreateInfoKHR & setMaxPipelineRayHitAttributeSize( uint32_t maxPipelineRayHitAttributeSize_ ) VULKAN_HPP_NOEXCEPT
{
maxPipelineRayHitAttributeSize = maxPipelineRayHitAttributeSize_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkRayTracingPipelineInterfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRayTracingPipelineInterfaceCreateInfoKHR*>( this );
}
operator VkRayTracingPipelineInterfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkRayTracingPipelineInterfaceCreateInfoKHR*>( this );
}
operator VkRayTracingPipelineInterfaceCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkRayTracingPipelineInterfaceCreateInfoKHR*>( this );
}
operator VkRayTracingPipelineInterfaceCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkRayTracingPipelineInterfaceCreateInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, maxPipelineRayPayloadSize, maxPipelineRayHitAttributeSize );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( RayTracingPipelineInterfaceCreateInfoKHR const & ) const = default;
#else
bool operator==( RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( maxPipelineRayPayloadSize == rhs.maxPipelineRayPayloadSize )
&& ( maxPipelineRayHitAttributeSize == rhs.maxPipelineRayHitAttributeSize );
#endif
}
bool operator!=( RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineInterfaceCreateInfoKHR;
const void * pNext = {};
uint32_t maxPipelineRayPayloadSize = {};
uint32_t maxPipelineRayHitAttributeSize = {};
};
template <>
struct CppType<StructureType, StructureType::eRayTracingPipelineInterfaceCreateInfoKHR>
{
using Type = RayTracingPipelineInterfaceCreateInfoKHR;
};
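// Usage sketch (illustrative only): pipeline libraries that are linked
// together must agree on these interface sizes; the byte counts below are
// placeholders:
//   vk::RayTracingPipelineInterfaceCreateInfoKHR interfaceInfo{};
//   interfaceInfo.setMaxPipelineRayPayloadSize( 32 )
//                .setMaxPipelineRayHitAttributeSize( 8 );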
// wrapper struct for struct VkRayTracingPipelineCreateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRayTracingPipelineCreateInfoKHR.html
struct RayTracingPipelineCreateInfoKHR
{
using NativeType = VkRayTracingPipelineCreateInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingPipelineCreateInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoKHR(VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, uint32_t stageCount_ = {}, const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ = {}, uint32_t groupCount_ = {}, const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR * pGroups_ = {}, uint32_t maxPipelineRayRecursionDepth_ = {}, const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo_ = {}, const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR * pLibraryInterface_ = {}, const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, stageCount{ stageCount_ }, pStages{ pStages_ }, groupCount{ groupCount_ }, pGroups{ pGroups_ }, maxPipelineRayRecursionDepth{ maxPipelineRayRecursionDepth_ }, pLibraryInfo{ pLibraryInfo_ }, pLibraryInterface{ pLibraryInterface_ }, pDynamicState{ pDynamicState_ }, layout{ layout_ }, basePipelineHandle{ basePipelineHandle_ }, basePipelineIndex{ basePipelineIndex_ }
{}
VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoKHR( RayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
RayTracingPipelineCreateInfoKHR( VkRayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: RayTracingPipelineCreateInfoKHR( *reinterpret_cast<RayTracingPipelineCreateInfoKHR const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
RayTracingPipelineCreateInfoKHR( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR> const & groups_ = {}, uint32_t maxPipelineRayRecursionDepth_ = {}, const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo_ = {}, const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR * pLibraryInterface_ = {}, const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {}, const void * pNext_ = nullptr )
: pNext( pNext_ ), flags( flags_ ), stageCount( static_cast<uint32_t>( stages_.size() ) ), pStages( stages_.data() ), groupCount( static_cast<uint32_t>( groups_.size() ) ), pGroups( groups_.data() ), maxPipelineRayRecursionDepth( maxPipelineRayRecursionDepth_ ), pLibraryInfo( pLibraryInfo_ ), pLibraryInterface( pLibraryInterface_ ), pDynamicState( pDynamicState_ ), layout( layout_ ), basePipelineHandle( basePipelineHandle_ ), basePipelineIndex( basePipelineIndex_ )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
RayTracingPipelineCreateInfoKHR & operator=( RayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
RayTracingPipelineCreateInfoKHR & operator=( VkRayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT
{
stageCount = stageCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT
{
pStages = pStages_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
RayTracingPipelineCreateInfoKHR & setStages( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_ ) VULKAN_HPP_NOEXCEPT
{
stageCount = static_cast<uint32_t>( stages_.size() );
pStages = stages_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setGroupCount( uint32_t groupCount_ ) VULKAN_HPP_NOEXCEPT
{
groupCount = groupCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setPGroups( const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR * pGroups_ ) VULKAN_HPP_NOEXCEPT
{
pGroups = pGroups_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
RayTracingPipelineCreateInfoKHR & setGroups( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR> const & groups_ ) VULKAN_HPP_NOEXCEPT
{
groupCount = static_cast<uint32_t>( groups_.size() );
pGroups = groups_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setMaxPipelineRayRecursionDepth( uint32_t maxPipelineRayRecursionDepth_ ) VULKAN_HPP_NOEXCEPT
{
maxPipelineRayRecursionDepth = maxPipelineRayRecursionDepth_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setPLibraryInfo( const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo_ ) VULKAN_HPP_NOEXCEPT
{
pLibraryInfo = pLibraryInfo_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setPLibraryInterface( const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR * pLibraryInterface_ ) VULKAN_HPP_NOEXCEPT
{
pLibraryInterface = pLibraryInterface_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setPDynamicState( const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ ) VULKAN_HPP_NOEXCEPT
{
pDynamicState = pDynamicState_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
{
layout = layout_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
{
basePipelineHandle = basePipelineHandle_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT
{
basePipelineIndex = basePipelineIndex_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkRayTracingPipelineCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR*>( this );
}
operator VkRayTracingPipelineCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkRayTracingPipelineCreateInfoKHR*>( this );
}
operator VkRayTracingPipelineCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR*>( this );
}
operator VkRayTracingPipelineCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkRayTracingPipelineCreateInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineCreateFlags const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * const &, const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR * const &, const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * const &, VULKAN_HPP_NAMESPACE::PipelineLayout const &, VULKAN_HPP_NAMESPACE::Pipeline const &, int32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, stageCount, pStages, groupCount, pGroups, maxPipelineRayRecursionDepth, pLibraryInfo, pLibraryInterface, pDynamicState, layout, basePipelineHandle, basePipelineIndex );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( RayTracingPipelineCreateInfoKHR const & ) const = default;
#else
bool operator==( RayTracingPipelineCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( stageCount == rhs.stageCount )
&& ( pStages == rhs.pStages )
&& ( groupCount == rhs.groupCount )
&& ( pGroups == rhs.pGroups )
&& ( maxPipelineRayRecursionDepth == rhs.maxPipelineRayRecursionDepth )
&& ( pLibraryInfo == rhs.pLibraryInfo )
&& ( pLibraryInterface == rhs.pLibraryInterface )
&& ( pDynamicState == rhs.pDynamicState )
&& ( layout == rhs.layout )
&& ( basePipelineHandle == rhs.basePipelineHandle )
&& ( basePipelineIndex == rhs.basePipelineIndex );
#endif
}
bool operator!=( RayTracingPipelineCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineCreateInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {};
uint32_t stageCount = {};
const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages = {};
uint32_t groupCount = {};
const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR * pGroups = {};
uint32_t maxPipelineRayRecursionDepth = {};
const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo = {};
const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR * pLibraryInterface = {};
const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState = {};
VULKAN_HPP_NAMESPACE::PipelineLayout layout = {};
VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {};
int32_t basePipelineIndex = {};
};
template <>
struct CppType<StructureType, StructureType::eRayTracingPipelineCreateInfoKHR>
{
using Type = RayTracingPipelineCreateInfoKHR;
};
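// Usage sketch (illustrative only): assemble a pipeline from previously
// created `stages`, `groups` and `layout` (all placeholders) via the ArrayProxy
// convenience constructor, then create it without a deferred operation or
// pipeline cache. Assumes a valid vk::Device `device` with
// VK_KHR_ray_tracing_pipeline enabled:
//   vk::RayTracingPipelineCreateInfoKHR createInfo(
//     {}, stages, groups, 1 /*maxPipelineRayRecursionDepth*/, nullptr, nullptr, nullptr, layout );
//   auto result = device.createRayTracingPipelineKHR( nullptr, nullptr, createInfo );
//   vk::Pipeline pipeline = result.value;  // result.result reports e.g. vk::Result::eSuccess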
// wrapper struct for struct VkRayTracingShaderGroupCreateInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRayTracingShaderGroupCreateInfoNV.html
struct RayTracingShaderGroupCreateInfoNV
{
using NativeType = VkRayTracingShaderGroupCreateInfoNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingShaderGroupCreateInfoNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoNV(VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral, uint32_t generalShader_ = VULKAN_HPP_NAMESPACE::ShaderUnusedNV, uint32_t closestHitShader_ = VULKAN_HPP_NAMESPACE::ShaderUnusedNV, uint32_t anyHitShader_ = VULKAN_HPP_NAMESPACE::ShaderUnusedNV, uint32_t intersectionShader_ = VULKAN_HPP_NAMESPACE::ShaderUnusedNV, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, type{ type_ }, generalShader{ generalShader_ }, closestHitShader{ closestHitShader_ }, anyHitShader{ anyHitShader_ }, intersectionShader{ intersectionShader_ }
{}
VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoNV( RayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
RayTracingShaderGroupCreateInfoNV( VkRayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: RayTracingShaderGroupCreateInfoNV( *reinterpret_cast<RayTracingShaderGroupCreateInfoNV const *>( &rhs ) )
{}
RayTracingShaderGroupCreateInfoNV & operator=( RayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
RayTracingShaderGroupCreateInfoNV & operator=( VkRayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV & setType( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ ) VULKAN_HPP_NOEXCEPT
{
type = type_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV & setGeneralShader( uint32_t generalShader_ ) VULKAN_HPP_NOEXCEPT
{
generalShader = generalShader_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV & setClosestHitShader( uint32_t closestHitShader_ ) VULKAN_HPP_NOEXCEPT
{
closestHitShader = closestHitShader_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV & setAnyHitShader( uint32_t anyHitShader_ ) VULKAN_HPP_NOEXCEPT
{
anyHitShader = anyHitShader_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV & setIntersectionShader( uint32_t intersectionShader_ ) VULKAN_HPP_NOEXCEPT
{
intersectionShader = intersectionShader_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkRayTracingShaderGroupCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRayTracingShaderGroupCreateInfoNV*>( this );
}
operator VkRayTracingShaderGroupCreateInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkRayTracingShaderGroupCreateInfoNV*>( this );
}
operator VkRayTracingShaderGroupCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkRayTracingShaderGroupCreateInfoNV*>( this );
}
operator VkRayTracingShaderGroupCreateInfoNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkRayTracingShaderGroupCreateInfoNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, type, generalShader, closestHitShader, anyHitShader, intersectionShader );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( RayTracingShaderGroupCreateInfoNV const & ) const = default;
#else
bool operator==( RayTracingShaderGroupCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( type == rhs.type )
&& ( generalShader == rhs.generalShader )
&& ( closestHitShader == rhs.closestHitShader )
&& ( anyHitShader == rhs.anyHitShader )
&& ( intersectionShader == rhs.intersectionShader );
#endif
}
bool operator!=( RayTracingShaderGroupCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingShaderGroupCreateInfoNV;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral;
uint32_t generalShader = VULKAN_HPP_NAMESPACE::ShaderUnusedNV;
uint32_t closestHitShader = VULKAN_HPP_NAMESPACE::ShaderUnusedNV;
uint32_t anyHitShader = VULKAN_HPP_NAMESPACE::ShaderUnusedNV;
uint32_t intersectionShader = VULKAN_HPP_NAMESPACE::ShaderUnusedNV;
};
template <>
struct CppType<StructureType, StructureType::eRayTracingShaderGroupCreateInfoNV>
{
using Type = RayTracingShaderGroupCreateInfoNV;
};
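// Usage sketch (illustrative only): the NV variant mirrors
// RayTracingShaderGroupCreateInfoKHR minus the capture-replay handle; e.g. a
// general (raygen) group whose shader is at placeholder index 0 of pStages:
//   auto raygenGroup = vk::RayTracingShaderGroupCreateInfoNV{}
//                        .setType( vk::RayTracingShaderGroupTypeKHR::eGeneral )
//                        .setGeneralShader( 0 );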
// wrapper struct for struct VkRayTracingPipelineCreateInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRayTracingPipelineCreateInfoNV.html
struct RayTracingPipelineCreateInfoNV
{
using NativeType = VkRayTracingPipelineCreateInfoNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingPipelineCreateInfoNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoNV(VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, uint32_t stageCount_ = {}, const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ = {}, uint32_t groupCount_ = {}, const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV * pGroups_ = {}, uint32_t maxRecursionDepth_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, stageCount{ stageCount_ }, pStages{ pStages_ }, groupCount{ groupCount_ }, pGroups{ pGroups_ }, maxRecursionDepth{ maxRecursionDepth_ }, layout{ layout_ }, basePipelineHandle{ basePipelineHandle_ }, basePipelineIndex{ basePipelineIndex_ }
{}
VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoNV( RayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
RayTracingPipelineCreateInfoNV( VkRayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: RayTracingPipelineCreateInfoNV( *reinterpret_cast<RayTracingPipelineCreateInfoNV const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
RayTracingPipelineCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV> const & groups_ = {}, uint32_t maxRecursionDepth_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {}, const void * pNext_ = nullptr )
: pNext( pNext_ ), flags( flags_ ), stageCount( static_cast<uint32_t>( stages_.size() ) ), pStages( stages_.data() ), groupCount( static_cast<uint32_t>( groups_.size() ) ), pGroups( groups_.data() ), maxRecursionDepth( maxRecursionDepth_ ), layout( layout_ ), basePipelineHandle( basePipelineHandle_ ), basePipelineIndex( basePipelineIndex_ )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
RayTracingPipelineCreateInfoNV & operator=( RayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
RayTracingPipelineCreateInfoNV & operator=( VkRayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT
{
stageCount = stageCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT
{
pStages = pStages_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
RayTracingPipelineCreateInfoNV & setStages( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_ ) VULKAN_HPP_NOEXCEPT
{
stageCount = static_cast<uint32_t>( stages_.size() );
pStages = stages_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setGroupCount( uint32_t groupCount_ ) VULKAN_HPP_NOEXCEPT
{
groupCount = groupCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setPGroups( const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV * pGroups_ ) VULKAN_HPP_NOEXCEPT
{
pGroups = pGroups_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
RayTracingPipelineCreateInfoNV & setGroups( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV> const & groups_ ) VULKAN_HPP_NOEXCEPT
{
groupCount = static_cast<uint32_t>( groups_.size() );
pGroups = groups_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setMaxRecursionDepth( uint32_t maxRecursionDepth_ ) VULKAN_HPP_NOEXCEPT
{
maxRecursionDepth = maxRecursionDepth_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
{
layout = layout_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
{
basePipelineHandle = basePipelineHandle_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT
{
basePipelineIndex = basePipelineIndex_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkRayTracingPipelineCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRayTracingPipelineCreateInfoNV*>( this );
}
operator VkRayTracingPipelineCreateInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkRayTracingPipelineCreateInfoNV*>( this );
}
operator VkRayTracingPipelineCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkRayTracingPipelineCreateInfoNV*>( this );
}
operator VkRayTracingPipelineCreateInfoNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkRayTracingPipelineCreateInfoNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineCreateFlags const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::PipelineLayout const &, VULKAN_HPP_NAMESPACE::Pipeline const &, int32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, stageCount, pStages, groupCount, pGroups, maxRecursionDepth, layout, basePipelineHandle, basePipelineIndex );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( RayTracingPipelineCreateInfoNV const & ) const = default;
#else
bool operator==( RayTracingPipelineCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( stageCount == rhs.stageCount )
&& ( pStages == rhs.pStages )
&& ( groupCount == rhs.groupCount )
&& ( pGroups == rhs.pGroups )
&& ( maxRecursionDepth == rhs.maxRecursionDepth )
&& ( layout == rhs.layout )
&& ( basePipelineHandle == rhs.basePipelineHandle )
&& ( basePipelineIndex == rhs.basePipelineIndex );
#endif
}
bool operator!=( RayTracingPipelineCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineCreateInfoNV;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {};
uint32_t stageCount = {};
const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages = {};
uint32_t groupCount = {};
const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV * pGroups = {};
uint32_t maxRecursionDepth = {};
VULKAN_HPP_NAMESPACE::PipelineLayout layout = {};
VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {};
int32_t basePipelineIndex = {};
};
template <>
struct CppType<StructureType, StructureType::eRayTracingPipelineCreateInfoNV>
{
using Type = RayTracingPipelineCreateInfoNV;
};
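  // A minimal usage sketch, not part of the generated API: the enhanced-mode setters fill the count/pointer pairs together.
  // `stages`, `groups`, and `layout` are hypothetical objects created elsewhere via VK_NV_ray_tracing.
  //
  //   std::array<vk::PipelineShaderStageCreateInfo, 2>     stages = { /* raygen, miss, ... */ };
  //   std::array<vk::RayTracingShaderGroupCreateInfoNV, 2> groups = { /* general, hit, ... */ };
  //   vk::RayTracingPipelineCreateInfoNV createInfo;
  //   createInfo.setStages( stages )            // sets stageCount and pStages in one call
  //             .setGroups( groups )            // sets groupCount and pGroups in one call
  //             .setMaxRecursionDepth( 1 )
  //             .setLayout( layout );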
// wrapper struct for struct VkRefreshCycleDurationGOOGLE, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRefreshCycleDurationGOOGLE.html
struct RefreshCycleDurationGOOGLE
{
using NativeType = VkRefreshCycleDurationGOOGLE;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR RefreshCycleDurationGOOGLE(uint64_t refreshDuration_ = {}) VULKAN_HPP_NOEXCEPT
: refreshDuration{ refreshDuration_ }
{}
VULKAN_HPP_CONSTEXPR RefreshCycleDurationGOOGLE( RefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
RefreshCycleDurationGOOGLE( VkRefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
: RefreshCycleDurationGOOGLE( *reinterpret_cast<RefreshCycleDurationGOOGLE const *>( &rhs ) )
{}
RefreshCycleDurationGOOGLE & operator=( RefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
RefreshCycleDurationGOOGLE & operator=( VkRefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE const *>( &rhs );
return *this;
}
operator VkRefreshCycleDurationGOOGLE const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRefreshCycleDurationGOOGLE*>( this );
}
operator VkRefreshCycleDurationGOOGLE &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkRefreshCycleDurationGOOGLE*>( this );
}
operator VkRefreshCycleDurationGOOGLE const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkRefreshCycleDurationGOOGLE*>( this );
}
operator VkRefreshCycleDurationGOOGLE *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkRefreshCycleDurationGOOGLE*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<uint64_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( refreshDuration );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( RefreshCycleDurationGOOGLE const & ) const = default;
#else
bool operator==( RefreshCycleDurationGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( refreshDuration == rhs.refreshDuration );
#endif
}
bool operator!=( RefreshCycleDurationGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
uint64_t refreshDuration = {};
};
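  // A minimal usage sketch, assuming VK_GOOGLE_display_timing is enabled and `device` / `swapchain` are valid handles:
  // this struct carries output only, so it is read after the query rather than constructed.
  //
  //   vk::RefreshCycleDurationGOOGLE rcd = device.getRefreshCycleDurationGOOGLE( swapchain );
  //   uint64_t refreshPeriodNs = rcd.refreshDuration;  // display refresh period in nanoseconds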
// wrapper struct for struct VkReleaseCapturedPipelineDataInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkReleaseCapturedPipelineDataInfoKHR.html
struct ReleaseCapturedPipelineDataInfoKHR
{
using NativeType = VkReleaseCapturedPipelineDataInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eReleaseCapturedPipelineDataInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ReleaseCapturedPipelineDataInfoKHR(VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pipeline{ pipeline_ }
{}
VULKAN_HPP_CONSTEXPR ReleaseCapturedPipelineDataInfoKHR( ReleaseCapturedPipelineDataInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ReleaseCapturedPipelineDataInfoKHR( VkReleaseCapturedPipelineDataInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: ReleaseCapturedPipelineDataInfoKHR( *reinterpret_cast<ReleaseCapturedPipelineDataInfoKHR const *>( &rhs ) )
{}
ReleaseCapturedPipelineDataInfoKHR & operator=( ReleaseCapturedPipelineDataInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ReleaseCapturedPipelineDataInfoKHR & operator=( VkReleaseCapturedPipelineDataInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ReleaseCapturedPipelineDataInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ReleaseCapturedPipelineDataInfoKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ReleaseCapturedPipelineDataInfoKHR & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
{
pipeline = pipeline_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkReleaseCapturedPipelineDataInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkReleaseCapturedPipelineDataInfoKHR*>( this );
}
operator VkReleaseCapturedPipelineDataInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkReleaseCapturedPipelineDataInfoKHR*>( this );
}
operator VkReleaseCapturedPipelineDataInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkReleaseCapturedPipelineDataInfoKHR*>( this );
}
operator VkReleaseCapturedPipelineDataInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkReleaseCapturedPipelineDataInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Pipeline const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pipeline );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ReleaseCapturedPipelineDataInfoKHR const & ) const = default;
#else
bool operator==( ReleaseCapturedPipelineDataInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pipeline == rhs.pipeline );
#endif
}
bool operator!=( ReleaseCapturedPipelineDataInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eReleaseCapturedPipelineDataInfoKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Pipeline pipeline = {};
};
template <>
struct CppType<StructureType, StructureType::eReleaseCapturedPipelineDataInfoKHR>
{
using Type = ReleaseCapturedPipelineDataInfoKHR;
};
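  // A minimal usage sketch, assuming VK_KHR_pipeline_binary is enabled and `device` / `pipeline` are valid handles
  // (result handling elided): releases the pipeline data the implementation retained for binary capture.
  //
  //   vk::ReleaseCapturedPipelineDataInfoKHR releaseInfo;
  //   releaseInfo.setPipeline( pipeline );
  //   device.releaseCapturedPipelineDataKHR( releaseInfo );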
// wrapper struct for struct VkReleaseSwapchainImagesInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkReleaseSwapchainImagesInfoEXT.html
struct ReleaseSwapchainImagesInfoEXT
{
using NativeType = VkReleaseSwapchainImagesInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eReleaseSwapchainImagesInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ReleaseSwapchainImagesInfoEXT(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {}, uint32_t imageIndexCount_ = {}, const uint32_t * pImageIndices_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, swapchain{ swapchain_ }, imageIndexCount{ imageIndexCount_ }, pImageIndices{ pImageIndices_ }
{}
VULKAN_HPP_CONSTEXPR ReleaseSwapchainImagesInfoEXT( ReleaseSwapchainImagesInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ReleaseSwapchainImagesInfoEXT( VkReleaseSwapchainImagesInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: ReleaseSwapchainImagesInfoEXT( *reinterpret_cast<ReleaseSwapchainImagesInfoEXT const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
ReleaseSwapchainImagesInfoEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & imageIndices_, const void * pNext_ = nullptr )
: pNext( pNext_ ), swapchain( swapchain_ ), imageIndexCount( static_cast<uint32_t>( imageIndices_.size() ) ), pImageIndices( imageIndices_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
ReleaseSwapchainImagesInfoEXT & operator=( ReleaseSwapchainImagesInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ReleaseSwapchainImagesInfoEXT & operator=( VkReleaseSwapchainImagesInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ReleaseSwapchainImagesInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ReleaseSwapchainImagesInfoEXT & setSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT
{
swapchain = swapchain_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ReleaseSwapchainImagesInfoEXT & setImageIndexCount( uint32_t imageIndexCount_ ) VULKAN_HPP_NOEXCEPT
{
imageIndexCount = imageIndexCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ReleaseSwapchainImagesInfoEXT & setPImageIndices( const uint32_t * pImageIndices_ ) VULKAN_HPP_NOEXCEPT
{
pImageIndices = pImageIndices_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
ReleaseSwapchainImagesInfoEXT & setImageIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & imageIndices_ ) VULKAN_HPP_NOEXCEPT
{
imageIndexCount = static_cast<uint32_t>( imageIndices_.size() );
pImageIndices = imageIndices_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkReleaseSwapchainImagesInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkReleaseSwapchainImagesInfoEXT*>( this );
}
operator VkReleaseSwapchainImagesInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkReleaseSwapchainImagesInfoEXT*>( this );
}
operator VkReleaseSwapchainImagesInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkReleaseSwapchainImagesInfoEXT*>( this );
}
operator VkReleaseSwapchainImagesInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkReleaseSwapchainImagesInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SwapchainKHR const &, uint32_t const &, const uint32_t * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, swapchain, imageIndexCount, pImageIndices );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ReleaseSwapchainImagesInfoEXT const & ) const = default;
#else
bool operator==( ReleaseSwapchainImagesInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( swapchain == rhs.swapchain )
&& ( imageIndexCount == rhs.imageIndexCount )
&& ( pImageIndices == rhs.pImageIndices );
#endif
}
bool operator!=( ReleaseSwapchainImagesInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eReleaseSwapchainImagesInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {};
uint32_t imageIndexCount = {};
const uint32_t * pImageIndices = {};
};
template <>
struct CppType<StructureType, StructureType::eReleaseSwapchainImagesInfoEXT>
{
using Type = ReleaseSwapchainImagesInfoEXT;
};
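  // A minimal usage sketch, assuming VK_EXT_swapchain_maintenance1 is enabled and `device` / `swapchain` are valid
  // handles; `acquiredImageIndex` is a hypothetical index from a previous acquire that will not be presented.
  //
  //   std::array<uint32_t, 1> indices = { acquiredImageIndex };
  //   vk::ReleaseSwapchainImagesInfoEXT releaseInfo( swapchain, indices );
  //   device.releaseSwapchainImagesEXT( releaseInfo );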
// wrapper struct for struct VkRenderPassAttachmentBeginInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderPassAttachmentBeginInfo.html
struct RenderPassAttachmentBeginInfo
{
using NativeType = VkRenderPassAttachmentBeginInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassAttachmentBeginInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR RenderPassAttachmentBeginInfo(uint32_t attachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::ImageView * pAttachments_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, attachmentCount{ attachmentCount_ }, pAttachments{ pAttachments_ }
{}
VULKAN_HPP_CONSTEXPR RenderPassAttachmentBeginInfo( RenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
RenderPassAttachmentBeginInfo( VkRenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: RenderPassAttachmentBeginInfo( *reinterpret_cast<RenderPassAttachmentBeginInfo const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
RenderPassAttachmentBeginInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageView> const & attachments_, const void * pNext_ = nullptr )
: pNext( pNext_ ), attachmentCount( static_cast<uint32_t>( attachments_.size() ) ), pAttachments( attachments_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
RenderPassAttachmentBeginInfo & operator=( RenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
RenderPassAttachmentBeginInfo & operator=( VkRenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 RenderPassAttachmentBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderPassAttachmentBeginInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
{
attachmentCount = attachmentCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderPassAttachmentBeginInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::ImageView * pAttachments_ ) VULKAN_HPP_NOEXCEPT
{
pAttachments = pAttachments_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
RenderPassAttachmentBeginInfo & setAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageView> const & attachments_ ) VULKAN_HPP_NOEXCEPT
{
attachmentCount = static_cast<uint32_t>( attachments_.size() );
pAttachments = attachments_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkRenderPassAttachmentBeginInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRenderPassAttachmentBeginInfo*>( this );
}
operator VkRenderPassAttachmentBeginInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkRenderPassAttachmentBeginInfo*>( this );
}
operator VkRenderPassAttachmentBeginInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkRenderPassAttachmentBeginInfo*>( this );
}
operator VkRenderPassAttachmentBeginInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkRenderPassAttachmentBeginInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::ImageView * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, attachmentCount, pAttachments );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( RenderPassAttachmentBeginInfo const & ) const = default;
#else
bool operator==( RenderPassAttachmentBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( attachmentCount == rhs.attachmentCount )
&& ( pAttachments == rhs.pAttachments );
#endif
}
bool operator!=( RenderPassAttachmentBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassAttachmentBeginInfo;
const void * pNext = {};
uint32_t attachmentCount = {};
const VULKAN_HPP_NAMESPACE::ImageView * pAttachments = {};
};
template <>
struct CppType<StructureType, StructureType::eRenderPassAttachmentBeginInfo>
{
using Type = RenderPassAttachmentBeginInfo;
};
using RenderPassAttachmentBeginInfoKHR = RenderPassAttachmentBeginInfo;
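  // A minimal usage sketch, assuming a framebuffer created with vk::FramebufferCreateFlagBits::eImageless and a
  // hypothetical image view `colorView`: the actual attachments are supplied at begin time by chaining this struct
  // into RenderPassBeginInfo::pNext.
  //
  //   std::array<vk::ImageView, 1> attachments = { colorView };
  //   vk::RenderPassAttachmentBeginInfo attachmentBeginInfo( attachments );
  //   // ... then pass &attachmentBeginInfo as the pNext of the RenderPassBeginInfo below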
// wrapper struct for struct VkRenderPassBeginInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderPassBeginInfo.html
struct RenderPassBeginInfo
{
using NativeType = VkRenderPassBeginInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassBeginInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo(VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ = {}, VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {}, uint32_t clearValueCount_ = {}, const VULKAN_HPP_NAMESPACE::ClearValue * pClearValues_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, renderPass{ renderPass_ }, framebuffer{ framebuffer_ }, renderArea{ renderArea_ }, clearValueCount{ clearValueCount_ }, pClearValues{ pClearValues_ }
{}
VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo( RenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
RenderPassBeginInfo( VkRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: RenderPassBeginInfo( *reinterpret_cast<RenderPassBeginInfo const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
RenderPassBeginInfo( VULKAN_HPP_NAMESPACE::RenderPass renderPass_, VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_, VULKAN_HPP_NAMESPACE::Rect2D renderArea_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ClearValue> const & clearValues_, const void * pNext_ = nullptr )
: pNext( pNext_ ), renderPass( renderPass_ ), framebuffer( framebuffer_ ), renderArea( renderArea_ ), clearValueCount( static_cast<uint32_t>( clearValues_.size() ) ), pClearValues( clearValues_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
RenderPassBeginInfo & operator=( RenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
RenderPassBeginInfo & operator=( VkRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassBeginInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
{
renderPass = renderPass_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & setFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ ) VULKAN_HPP_NOEXCEPT
{
framebuffer = framebuffer_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & setRenderArea( VULKAN_HPP_NAMESPACE::Rect2D const & renderArea_ ) VULKAN_HPP_NOEXCEPT
{
renderArea = renderArea_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & setClearValueCount( uint32_t clearValueCount_ ) VULKAN_HPP_NOEXCEPT
{
clearValueCount = clearValueCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & setPClearValues( const VULKAN_HPP_NAMESPACE::ClearValue * pClearValues_ ) VULKAN_HPP_NOEXCEPT
{
pClearValues = pClearValues_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
RenderPassBeginInfo & setClearValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ClearValue> const & clearValues_ ) VULKAN_HPP_NOEXCEPT
{
clearValueCount = static_cast<uint32_t>( clearValues_.size() );
pClearValues = clearValues_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkRenderPassBeginInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRenderPassBeginInfo*>( this );
}
operator VkRenderPassBeginInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkRenderPassBeginInfo*>( this );
}
operator VkRenderPassBeginInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkRenderPassBeginInfo*>( this );
}
operator VkRenderPassBeginInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkRenderPassBeginInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::RenderPass const &, VULKAN_HPP_NAMESPACE::Framebuffer const &, VULKAN_HPP_NAMESPACE::Rect2D const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::ClearValue * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, renderPass, framebuffer, renderArea, clearValueCount, pClearValues );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( RenderPassBeginInfo const & ) const = default;
#else
bool operator==( RenderPassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( renderPass == rhs.renderPass )
&& ( framebuffer == rhs.framebuffer )
&& ( renderArea == rhs.renderArea )
&& ( clearValueCount == rhs.clearValueCount )
&& ( pClearValues == rhs.pClearValues );
#endif
}
bool operator!=( RenderPassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassBeginInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::RenderPass renderPass = {};
VULKAN_HPP_NAMESPACE::Framebuffer framebuffer = {};
VULKAN_HPP_NAMESPACE::Rect2D renderArea = {};
uint32_t clearValueCount = {};
const VULKAN_HPP_NAMESPACE::ClearValue * pClearValues = {};
};
template <>
struct CppType<StructureType, StructureType::eRenderPassBeginInfo>
{
using Type = RenderPassBeginInfo;
};
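  // A minimal usage sketch, assuming valid `renderPass`, `framebuffer`, and `commandBuffer` handles and a 1920x1080
  // render area: the enhanced-mode constructor derives clearValueCount and pClearValues from the array proxy.
  //
  //   std::array<vk::ClearValue, 1> clearValues = { vk::ClearColorValue( 0.0f, 0.0f, 0.0f, 1.0f ) };
  //   vk::RenderPassBeginInfo beginInfo( renderPass, framebuffer, vk::Rect2D( { 0, 0 }, { 1920, 1080 } ), clearValues );
  //   commandBuffer.beginRenderPass( beginInfo, vk::SubpassContents::eInline );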
// wrapper struct for struct VkSubpassDescription, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSubpassDescription.html
struct SubpassDescription
{
using NativeType = VkSubpassDescription;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SubpassDescription(VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ = {}, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, uint32_t inputAttachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference * pInputAttachments_ = {}, uint32_t colorAttachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference * pColorAttachments_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference * pResolveAttachments_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference * pDepthStencilAttachment_ = {}, uint32_t preserveAttachmentCount_ = {}, const uint32_t * pPreserveAttachments_ = {}) VULKAN_HPP_NOEXCEPT
: flags{ flags_ }, pipelineBindPoint{ pipelineBindPoint_ }, inputAttachmentCount{ inputAttachmentCount_ }, pInputAttachments{ pInputAttachments_ }, colorAttachmentCount{ colorAttachmentCount_ }, pColorAttachments{ pColorAttachments_ }, pResolveAttachments{ pResolveAttachments_ }, pDepthStencilAttachment{ pDepthStencilAttachment_ }, preserveAttachmentCount{ preserveAttachmentCount_ }, pPreserveAttachments{ pPreserveAttachments_ }
{}
VULKAN_HPP_CONSTEXPR SubpassDescription( SubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SubpassDescription( VkSubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT
: SubpassDescription( *reinterpret_cast<SubpassDescription const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
SubpassDescription( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference> const & inputAttachments_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference> const & colorAttachments_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference> const & resolveAttachments_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference * pDepthStencilAttachment_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & preserveAttachments_ = {} )
: flags( flags_ ), pipelineBindPoint( pipelineBindPoint_ ), inputAttachmentCount( static_cast<uint32_t>( inputAttachments_.size() ) ), pInputAttachments( inputAttachments_.data() ), colorAttachmentCount( static_cast<uint32_t>( colorAttachments_.size() ) ), pColorAttachments( colorAttachments_.data() ), pResolveAttachments( resolveAttachments_.data() ), pDepthStencilAttachment( pDepthStencilAttachment_ ), preserveAttachmentCount( static_cast<uint32_t>( preserveAttachments_.size() ) ), pPreserveAttachments( preserveAttachments_.data() )
{
#ifdef VULKAN_HPP_NO_EXCEPTIONS
VULKAN_HPP_ASSERT( resolveAttachments_.empty() || ( colorAttachments_.size() == resolveAttachments_.size() ) );
#else
if ( !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() ) )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::SubpassDescription::SubpassDescription: !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() )" );
}
#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
SubpassDescription & operator=( SubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SubpassDescription & operator=( VkSubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDescription const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setFlags( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
{
pipelineBindPoint = pipelineBindPoint_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setInputAttachmentCount( uint32_t inputAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
{
inputAttachmentCount = inputAttachmentCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setPInputAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference * pInputAttachments_ ) VULKAN_HPP_NOEXCEPT
{
pInputAttachments = pInputAttachments_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
SubpassDescription & setInputAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference> const & inputAttachments_ ) VULKAN_HPP_NOEXCEPT
{
inputAttachmentCount = static_cast<uint32_t>( inputAttachments_.size() );
pInputAttachments = inputAttachments_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
{
colorAttachmentCount = colorAttachmentCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setPColorAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference * pColorAttachments_ ) VULKAN_HPP_NOEXCEPT
{
pColorAttachments = pColorAttachments_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
SubpassDescription & setColorAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference> const & colorAttachments_ ) VULKAN_HPP_NOEXCEPT
{
colorAttachmentCount = static_cast<uint32_t>( colorAttachments_.size() );
pColorAttachments = colorAttachments_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setPResolveAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference * pResolveAttachments_ ) VULKAN_HPP_NOEXCEPT
{
pResolveAttachments = pResolveAttachments_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
SubpassDescription & setResolveAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference> const & resolveAttachments_ ) VULKAN_HPP_NOEXCEPT
{
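      // Note: resolve attachments pair one-to-one with color attachments (VkSubpassDescription has no separate
      // resolve count), so this setter intentionally updates colorAttachmentCount.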
colorAttachmentCount = static_cast<uint32_t>( resolveAttachments_.size() );
pResolveAttachments = resolveAttachments_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setPDepthStencilAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference * pDepthStencilAttachment_ ) VULKAN_HPP_NOEXCEPT
{
pDepthStencilAttachment = pDepthStencilAttachment_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setPreserveAttachmentCount( uint32_t preserveAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
{
preserveAttachmentCount = preserveAttachmentCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setPPreserveAttachments( const uint32_t * pPreserveAttachments_ ) VULKAN_HPP_NOEXCEPT
{
pPreserveAttachments = pPreserveAttachments_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
SubpassDescription & setPreserveAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & preserveAttachments_ ) VULKAN_HPP_NOEXCEPT
{
preserveAttachmentCount = static_cast<uint32_t>( preserveAttachments_.size() );
pPreserveAttachments = preserveAttachments_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkSubpassDescription const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSubpassDescription*>( this );
}
operator VkSubpassDescription &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSubpassDescription*>( this );
}
operator VkSubpassDescription const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSubpassDescription*>( this );
}
operator VkSubpassDescription *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSubpassDescription*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags const &, VULKAN_HPP_NAMESPACE::PipelineBindPoint const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::AttachmentReference * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::AttachmentReference * const &, const VULKAN_HPP_NAMESPACE::AttachmentReference * const &, const VULKAN_HPP_NAMESPACE::AttachmentReference * const &, uint32_t const &, const uint32_t * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( flags, pipelineBindPoint, inputAttachmentCount, pInputAttachments, colorAttachmentCount, pColorAttachments, pResolveAttachments, pDepthStencilAttachment, preserveAttachmentCount, pPreserveAttachments );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SubpassDescription const & ) const = default;
#else
bool operator==( SubpassDescription const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( flags == rhs.flags )
&& ( pipelineBindPoint == rhs.pipelineBindPoint )
&& ( inputAttachmentCount == rhs.inputAttachmentCount )
&& ( pInputAttachments == rhs.pInputAttachments )
&& ( colorAttachmentCount == rhs.colorAttachmentCount )
&& ( pColorAttachments == rhs.pColorAttachments )
&& ( pResolveAttachments == rhs.pResolveAttachments )
&& ( pDepthStencilAttachment == rhs.pDepthStencilAttachment )
&& ( preserveAttachmentCount == rhs.preserveAttachmentCount )
&& ( pPreserveAttachments == rhs.pPreserveAttachments );
#endif
}
bool operator!=( SubpassDescription const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags = {};
VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
uint32_t inputAttachmentCount = {};
const VULKAN_HPP_NAMESPACE::AttachmentReference * pInputAttachments = {};
uint32_t colorAttachmentCount = {};
const VULKAN_HPP_NAMESPACE::AttachmentReference * pColorAttachments = {};
const VULKAN_HPP_NAMESPACE::AttachmentReference * pResolveAttachments = {};
const VULKAN_HPP_NAMESPACE::AttachmentReference * pDepthStencilAttachment = {};
uint32_t preserveAttachmentCount = {};
const uint32_t * pPreserveAttachments = {};
};
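  // A minimal usage sketch, with a hypothetical `colorRef` referencing attachment 0: the enhanced-mode constructor
  // derives the attachment counts from the array proxies, so they need not be set by hand.
  //
  //   vk::AttachmentReference colorRef( 0, vk::ImageLayout::eColorAttachmentOptimal );
  //   vk::SubpassDescription subpass( {}, vk::PipelineBindPoint::eGraphics,
  //                                   {},          // no input attachments
  //                                   colorRef );  // one color attachment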
// wrapper struct for struct VkSubpassDependency, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSubpassDependency.html
struct SubpassDependency
{
using NativeType = VkSubpassDependency;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SubpassDependency(uint32_t srcSubpass_ = {}, uint32_t dstSubpass_ = {}, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ = {}, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {}) VULKAN_HPP_NOEXCEPT
: srcSubpass{ srcSubpass_ }, dstSubpass{ dstSubpass_ }, srcStageMask{ srcStageMask_ }, dstStageMask{ dstStageMask_ }, srcAccessMask{ srcAccessMask_ }, dstAccessMask{ dstAccessMask_ }, dependencyFlags{ dependencyFlags_ }
{}
VULKAN_HPP_CONSTEXPR SubpassDependency( SubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SubpassDependency( VkSubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT
: SubpassDependency( *reinterpret_cast<SubpassDependency const *>( &rhs ) )
{}
SubpassDependency & operator=( SubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SubpassDependency & operator=( VkSubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDependency const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setSrcSubpass( uint32_t srcSubpass_ ) VULKAN_HPP_NOEXCEPT
{
srcSubpass = srcSubpass_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setDstSubpass( uint32_t dstSubpass_ ) VULKAN_HPP_NOEXCEPT
{
dstSubpass = dstSubpass_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ ) VULKAN_HPP_NOEXCEPT
{
srcStageMask = srcStageMask_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ ) VULKAN_HPP_NOEXCEPT
{
dstStageMask = dstStageMask_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
{
srcAccessMask = srcAccessMask_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
{
dstAccessMask = dstAccessMask_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT
{
dependencyFlags = dependencyFlags_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkSubpassDependency const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSubpassDependency*>( this );
}
operator VkSubpassDependency &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSubpassDependency*>( this );
}
operator VkSubpassDependency const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSubpassDependency*>( this );
}
operator VkSubpassDependency *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSubpassDependency*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::PipelineStageFlags const &, VULKAN_HPP_NAMESPACE::PipelineStageFlags const &, VULKAN_HPP_NAMESPACE::AccessFlags const &, VULKAN_HPP_NAMESPACE::AccessFlags const &, VULKAN_HPP_NAMESPACE::DependencyFlags const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( srcSubpass, dstSubpass, srcStageMask, dstStageMask, srcAccessMask, dstAccessMask, dependencyFlags );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SubpassDependency const & ) const = default;
#else
bool operator==( SubpassDependency const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( srcSubpass == rhs.srcSubpass )
&& ( dstSubpass == rhs.dstSubpass )
&& ( srcStageMask == rhs.srcStageMask )
&& ( dstStageMask == rhs.dstStageMask )
&& ( srcAccessMask == rhs.srcAccessMask )
&& ( dstAccessMask == rhs.dstAccessMask )
&& ( dependencyFlags == rhs.dependencyFlags );
#endif
}
bool operator!=( SubpassDependency const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
uint32_t srcSubpass = {};
uint32_t dstSubpass = {};
VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask = {};
VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask = {};
VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {};
VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags = {};
};
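  // A minimal usage sketch: a common external-to-subpass-0 dependency that orders the previous color attachment
  // output before the first subpass writes; VK_SUBPASS_EXTERNAL is the usual source for the first dependency.
  //
  //   vk::SubpassDependency dependency( VK_SUBPASS_EXTERNAL, 0,
  //                                     vk::PipelineStageFlagBits::eColorAttachmentOutput,
  //                                     vk::PipelineStageFlagBits::eColorAttachmentOutput,
  //                                     {},                                          // srcAccessMask
  //                                     vk::AccessFlagBits::eColorAttachmentWrite ); // dstAccessMask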
// wrapper struct for struct VkRenderPassCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderPassCreateInfo.html
struct RenderPassCreateInfo
{
using NativeType = VkRenderPassCreateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassCreateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR RenderPassCreateInfo(VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ = {}, uint32_t attachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentDescription * pAttachments_ = {}, uint32_t subpassCount_ = {}, const VULKAN_HPP_NAMESPACE::SubpassDescription * pSubpasses_ = {}, uint32_t dependencyCount_ = {}, const VULKAN_HPP_NAMESPACE::SubpassDependency * pDependencies_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, attachmentCount{ attachmentCount_ }, pAttachments{ pAttachments_ }, subpassCount{ subpassCount_ }, pSubpasses{ pSubpasses_ }, dependencyCount{ dependencyCount_ }, pDependencies{ pDependencies_ }
{}
VULKAN_HPP_CONSTEXPR RenderPassCreateInfo( RenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
RenderPassCreateInfo( VkRenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: RenderPassCreateInfo( *reinterpret_cast<RenderPassCreateInfo const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
RenderPassCreateInfo( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentDescription> const & attachments_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDescription> const & subpasses_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDependency> const & dependencies_ = {}, const void * pNext_ = nullptr )
: pNext( pNext_ ), flags( flags_ ), attachmentCount( static_cast<uint32_t>( attachments_.size() ) ), pAttachments( attachments_.data() ), subpassCount( static_cast<uint32_t>( subpasses_.size() ) ), pSubpasses( subpasses_.data() ), dependencyCount( static_cast<uint32_t>( dependencies_.size() ) ), pDependencies( dependencies_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
RenderPassCreateInfo & operator=( RenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
RenderPassCreateInfo & operator=( VkRenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
{
attachmentCount = attachmentCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::AttachmentDescription * pAttachments_ ) VULKAN_HPP_NOEXCEPT
{
pAttachments = pAttachments_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
RenderPassCreateInfo & setAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentDescription> const & attachments_ ) VULKAN_HPP_NOEXCEPT
{
attachmentCount = static_cast<uint32_t>( attachments_.size() );
pAttachments = attachments_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT
{
subpassCount = subpassCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setPSubpasses( const VULKAN_HPP_NAMESPACE::SubpassDescription * pSubpasses_ ) VULKAN_HPP_NOEXCEPT
{
pSubpasses = pSubpasses_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
RenderPassCreateInfo & setSubpasses( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDescription> const & subpasses_ ) VULKAN_HPP_NOEXCEPT
{
subpassCount = static_cast<uint32_t>( subpasses_.size() );
pSubpasses = subpasses_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT
{
dependencyCount = dependencyCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setPDependencies( const VULKAN_HPP_NAMESPACE::SubpassDependency * pDependencies_ ) VULKAN_HPP_NOEXCEPT
{
pDependencies = pDependencies_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
RenderPassCreateInfo & setDependencies( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDependency> const & dependencies_ ) VULKAN_HPP_NOEXCEPT
{
dependencyCount = static_cast<uint32_t>( dependencies_.size() );
pDependencies = dependencies_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkRenderPassCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRenderPassCreateInfo*>( this );
}
operator VkRenderPassCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkRenderPassCreateInfo*>( this );
}
operator VkRenderPassCreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkRenderPassCreateInfo*>( this );
}
operator VkRenderPassCreateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkRenderPassCreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::RenderPassCreateFlags const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::AttachmentDescription * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::SubpassDescription * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::SubpassDependency * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, attachmentCount, pAttachments, subpassCount, pSubpasses, dependencyCount, pDependencies );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( RenderPassCreateInfo const & ) const = default;
#else
bool operator==( RenderPassCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( attachmentCount == rhs.attachmentCount )
&& ( pAttachments == rhs.pAttachments )
&& ( subpassCount == rhs.subpassCount )
&& ( pSubpasses == rhs.pSubpasses )
&& ( dependencyCount == rhs.dependencyCount )
&& ( pDependencies == rhs.pDependencies );
#endif
}
bool operator!=( RenderPassCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreateInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags = {};
uint32_t attachmentCount = {};
const VULKAN_HPP_NAMESPACE::AttachmentDescription * pAttachments = {};
uint32_t subpassCount = {};
const VULKAN_HPP_NAMESPACE::SubpassDescription * pSubpasses = {};
uint32_t dependencyCount = {};
const VULKAN_HPP_NAMESPACE::SubpassDependency * pDependencies = {};
};
template <>
struct CppType<StructureType, StructureType::eRenderPassCreateInfo>
{
using Type = RenderPassCreateInfo;
};
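  // A minimal usage sketch, assuming a valid `device` plus hypothetical `colorAttachment` (a vk::AttachmentDescription),
  // `subpass`, and `dependency` objects like the ones sketched above: single values bind to the array proxies directly.
  //
  //   vk::RenderPassCreateInfo createInfo( {}, colorAttachment, subpass, dependency );
  //   vk::RenderPass renderPass = device.createRenderPass( createInfo );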
// wrapper struct for struct VkSubpassDescription2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSubpassDescription2.html
struct SubpassDescription2
{
using NativeType = VkSubpassDescription2;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassDescription2;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SubpassDescription2(VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ = {}, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, uint32_t viewMask_ = {}, uint32_t inputAttachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pInputAttachments_ = {}, uint32_t colorAttachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pColorAttachments_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pResolveAttachments_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilAttachment_ = {}, uint32_t preserveAttachmentCount_ = {}, const uint32_t * pPreserveAttachments_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, pipelineBindPoint{ pipelineBindPoint_ }, viewMask{ viewMask_ }, inputAttachmentCount{ inputAttachmentCount_ }, pInputAttachments{ pInputAttachments_ }, colorAttachmentCount{ colorAttachmentCount_ }, pColorAttachments{ pColorAttachments_ }, pResolveAttachments{ pResolveAttachments_ }, pDepthStencilAttachment{ pDepthStencilAttachment_ }, preserveAttachmentCount{ preserveAttachmentCount_ }, pPreserveAttachments{ pPreserveAttachments_ }
{}
VULKAN_HPP_CONSTEXPR SubpassDescription2( SubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SubpassDescription2( VkSubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT
: SubpassDescription2( *reinterpret_cast<SubpassDescription2 const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
SubpassDescription2( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, uint32_t viewMask_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const & inputAttachments_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const & colorAttachments_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const & resolveAttachments_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilAttachment_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & preserveAttachments_ = {}, const void * pNext_ = nullptr )
: pNext( pNext_ ), flags( flags_ ), pipelineBindPoint( pipelineBindPoint_ ), viewMask( viewMask_ ), inputAttachmentCount( static_cast<uint32_t>( inputAttachments_.size() ) ), pInputAttachments( inputAttachments_.data() ), colorAttachmentCount( static_cast<uint32_t>( colorAttachments_.size() ) ), pColorAttachments( colorAttachments_.data() ), pResolveAttachments( resolveAttachments_.data() ), pDepthStencilAttachment( pDepthStencilAttachment_ ), preserveAttachmentCount( static_cast<uint32_t>( preserveAttachments_.size() ) ), pPreserveAttachments( preserveAttachments_.data() )
{
#ifdef VULKAN_HPP_NO_EXCEPTIONS
VULKAN_HPP_ASSERT( resolveAttachments_.empty() || ( colorAttachments_.size() == resolveAttachments_.size() ) );
#else
if ( !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() ) )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::SubpassDescription2::SubpassDescription2: !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() )" );
}
#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
SubpassDescription2 & operator=( SubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SubpassDescription2 & operator=( VkSubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDescription2 const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setFlags( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
{
pipelineBindPoint = pipelineBindPoint_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT
{
viewMask = viewMask_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setInputAttachmentCount( uint32_t inputAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
{
inputAttachmentCount = inputAttachmentCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPInputAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pInputAttachments_ ) VULKAN_HPP_NOEXCEPT
{
pInputAttachments = pInputAttachments_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
SubpassDescription2 & setInputAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const & inputAttachments_ ) VULKAN_HPP_NOEXCEPT
{
inputAttachmentCount = static_cast<uint32_t>( inputAttachments_.size() );
pInputAttachments = inputAttachments_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
{
colorAttachmentCount = colorAttachmentCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPColorAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pColorAttachments_ ) VULKAN_HPP_NOEXCEPT
{
pColorAttachments = pColorAttachments_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
SubpassDescription2 & setColorAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const & colorAttachments_ ) VULKAN_HPP_NOEXCEPT
{
colorAttachmentCount = static_cast<uint32_t>( colorAttachments_.size() );
pColorAttachments = colorAttachments_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPResolveAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pResolveAttachments_ ) VULKAN_HPP_NOEXCEPT
{
pResolveAttachments = pResolveAttachments_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
SubpassDescription2 & setResolveAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const & resolveAttachments_ ) VULKAN_HPP_NOEXCEPT
{
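      // Note: as in SubpassDescription, resolve attachments share colorAttachmentCount, so this setter updates it.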
colorAttachmentCount = static_cast<uint32_t>( resolveAttachments_.size() );
pResolveAttachments = resolveAttachments_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPDepthStencilAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilAttachment_ ) VULKAN_HPP_NOEXCEPT
{
pDepthStencilAttachment = pDepthStencilAttachment_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPreserveAttachmentCount( uint32_t preserveAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
{
preserveAttachmentCount = preserveAttachmentCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPPreserveAttachments( const uint32_t * pPreserveAttachments_ ) VULKAN_HPP_NOEXCEPT
{
pPreserveAttachments = pPreserveAttachments_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
SubpassDescription2 & setPreserveAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & preserveAttachments_ ) VULKAN_HPP_NOEXCEPT
{
preserveAttachmentCount = static_cast<uint32_t>( preserveAttachments_.size() );
pPreserveAttachments = preserveAttachments_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkSubpassDescription2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSubpassDescription2*>( this );
}
operator VkSubpassDescription2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSubpassDescription2*>( this );
}
operator VkSubpassDescription2 const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSubpassDescription2*>( this );
}
operator VkSubpassDescription2 *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSubpassDescription2*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags const &, VULKAN_HPP_NAMESPACE::PipelineBindPoint const &, uint32_t const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::AttachmentReference2 * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::AttachmentReference2 * const &, const VULKAN_HPP_NAMESPACE::AttachmentReference2 * const &, const VULKAN_HPP_NAMESPACE::AttachmentReference2 * const &, uint32_t const &, const uint32_t * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, pipelineBindPoint, viewMask, inputAttachmentCount, pInputAttachments, colorAttachmentCount, pColorAttachments, pResolveAttachments, pDepthStencilAttachment, preserveAttachmentCount, pPreserveAttachments );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SubpassDescription2 const & ) const = default;
#else
bool operator==( SubpassDescription2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( pipelineBindPoint == rhs.pipelineBindPoint )
&& ( viewMask == rhs.viewMask )
&& ( inputAttachmentCount == rhs.inputAttachmentCount )
&& ( pInputAttachments == rhs.pInputAttachments )
&& ( colorAttachmentCount == rhs.colorAttachmentCount )
&& ( pColorAttachments == rhs.pColorAttachments )
&& ( pResolveAttachments == rhs.pResolveAttachments )
&& ( pDepthStencilAttachment == rhs.pDepthStencilAttachment )
&& ( preserveAttachmentCount == rhs.preserveAttachmentCount )
&& ( pPreserveAttachments == rhs.pPreserveAttachments );
#endif
}
bool operator!=( SubpassDescription2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDescription2;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags = {};
VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
uint32_t viewMask = {};
uint32_t inputAttachmentCount = {};
const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pInputAttachments = {};
uint32_t colorAttachmentCount = {};
const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pColorAttachments = {};
const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pResolveAttachments = {};
const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilAttachment = {};
uint32_t preserveAttachmentCount = {};
const uint32_t * pPreserveAttachments = {};
};
template <>
struct CppType<StructureType, StructureType::eSubpassDescription2>
{
using Type = SubpassDescription2;
};
using SubpassDescription2KHR = SubpassDescription2;
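  // Usage sketch (illustrative only; assumes the default `vk` namespace and enhanced mode):
  // the ArrayProxy setters fill both the count and the pointer members from a single range.
  //   vk::AttachmentReference2 colorRef{ 0, vk::ImageLayout::eColorAttachmentOptimal };
  //   vk::SubpassDescription2  subpass = vk::SubpassDescription2{}
  //                                        .setPipelineBindPoint( vk::PipelineBindPoint::eGraphics )
  //                                        .setColorAttachments( colorRef );  // sets colorAttachmentCount = 1 and pColorAttachments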
// wrapper struct for struct VkSubpassDependency2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSubpassDependency2.html
struct SubpassDependency2
{
using NativeType = VkSubpassDependency2;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassDependency2;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SubpassDependency2(uint32_t srcSubpass_ = {}, uint32_t dstSubpass_ = {}, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ = {}, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {}, int32_t viewOffset_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, srcSubpass{ srcSubpass_ }, dstSubpass{ dstSubpass_ }, srcStageMask{ srcStageMask_ }, dstStageMask{ dstStageMask_ }, srcAccessMask{ srcAccessMask_ }, dstAccessMask{ dstAccessMask_ }, dependencyFlags{ dependencyFlags_ }, viewOffset{ viewOffset_ }
{}
VULKAN_HPP_CONSTEXPR SubpassDependency2( SubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SubpassDependency2( VkSubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT
: SubpassDependency2( *reinterpret_cast<SubpassDependency2 const *>( &rhs ) )
{}
SubpassDependency2 & operator=( SubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SubpassDependency2 & operator=( VkSubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDependency2 const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setSrcSubpass( uint32_t srcSubpass_ ) VULKAN_HPP_NOEXCEPT
{
srcSubpass = srcSubpass_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setDstSubpass( uint32_t dstSubpass_ ) VULKAN_HPP_NOEXCEPT
{
dstSubpass = dstSubpass_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ ) VULKAN_HPP_NOEXCEPT
{
srcStageMask = srcStageMask_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ ) VULKAN_HPP_NOEXCEPT
{
dstStageMask = dstStageMask_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
{
srcAccessMask = srcAccessMask_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
{
dstAccessMask = dstAccessMask_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT
{
dependencyFlags = dependencyFlags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setViewOffset( int32_t viewOffset_ ) VULKAN_HPP_NOEXCEPT
{
viewOffset = viewOffset_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkSubpassDependency2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSubpassDependency2*>( this );
}
operator VkSubpassDependency2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSubpassDependency2*>( this );
}
operator VkSubpassDependency2 const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSubpassDependency2*>( this );
}
operator VkSubpassDependency2 *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSubpassDependency2*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::PipelineStageFlags const &, VULKAN_HPP_NAMESPACE::PipelineStageFlags const &, VULKAN_HPP_NAMESPACE::AccessFlags const &, VULKAN_HPP_NAMESPACE::AccessFlags const &, VULKAN_HPP_NAMESPACE::DependencyFlags const &, int32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, srcSubpass, dstSubpass, srcStageMask, dstStageMask, srcAccessMask, dstAccessMask, dependencyFlags, viewOffset );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SubpassDependency2 const & ) const = default;
#else
bool operator==( SubpassDependency2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( srcSubpass == rhs.srcSubpass )
&& ( dstSubpass == rhs.dstSubpass )
&& ( srcStageMask == rhs.srcStageMask )
&& ( dstStageMask == rhs.dstStageMask )
&& ( srcAccessMask == rhs.srcAccessMask )
&& ( dstAccessMask == rhs.dstAccessMask )
&& ( dependencyFlags == rhs.dependencyFlags )
&& ( viewOffset == rhs.viewOffset );
#endif
}
bool operator!=( SubpassDependency2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDependency2;
const void * pNext = {};
uint32_t srcSubpass = {};
uint32_t dstSubpass = {};
VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask = {};
VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask = {};
VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {};
VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags = {};
int32_t viewOffset = {};
};
template <>
struct CppType<StructureType, StructureType::eSubpassDependency2>
{
using Type = SubpassDependency2;
};
using SubpassDependency2KHR = SubpassDependency2;
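  // Usage sketch (illustrative only): an external -> subpass 0 dependency built with the
  // fluent setters; the stage/access masks shown are just one common choice.
  //   vk::SubpassDependency2 dependency = vk::SubpassDependency2{}
  //                                         .setSrcSubpass( VK_SUBPASS_EXTERNAL )
  //                                         .setDstSubpass( 0 )
  //                                         .setSrcStageMask( vk::PipelineStageFlagBits::eColorAttachmentOutput )
  //                                         .setDstStageMask( vk::PipelineStageFlagBits::eColorAttachmentOutput )
  //                                         .setDstAccessMask( vk::AccessFlagBits::eColorAttachmentWrite );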
// wrapper struct for struct VkRenderPassCreateInfo2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderPassCreateInfo2.html
struct RenderPassCreateInfo2
{
using NativeType = VkRenderPassCreateInfo2;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassCreateInfo2;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR RenderPassCreateInfo2(VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ = {}, uint32_t attachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentDescription2 * pAttachments_ = {}, uint32_t subpassCount_ = {}, const VULKAN_HPP_NAMESPACE::SubpassDescription2 * pSubpasses_ = {}, uint32_t dependencyCount_ = {}, const VULKAN_HPP_NAMESPACE::SubpassDependency2 * pDependencies_ = {}, uint32_t correlatedViewMaskCount_ = {}, const uint32_t * pCorrelatedViewMasks_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, attachmentCount{ attachmentCount_ }, pAttachments{ pAttachments_ }, subpassCount{ subpassCount_ }, pSubpasses{ pSubpasses_ }, dependencyCount{ dependencyCount_ }, pDependencies{ pDependencies_ }, correlatedViewMaskCount{ correlatedViewMaskCount_ }, pCorrelatedViewMasks{ pCorrelatedViewMasks_ }
{}
VULKAN_HPP_CONSTEXPR RenderPassCreateInfo2( RenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
RenderPassCreateInfo2( VkRenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
: RenderPassCreateInfo2( *reinterpret_cast<RenderPassCreateInfo2 const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
RenderPassCreateInfo2( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentDescription2> const & attachments_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDescription2> const & subpasses_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDependency2> const & dependencies_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & correlatedViewMasks_ = {}, const void * pNext_ = nullptr )
: pNext( pNext_ ), flags( flags_ ), attachmentCount( static_cast<uint32_t>( attachments_.size() ) ), pAttachments( attachments_.data() ), subpassCount( static_cast<uint32_t>( subpasses_.size() ) ), pSubpasses( subpasses_.data() ), dependencyCount( static_cast<uint32_t>( dependencies_.size() ) ), pDependencies( dependencies_.data() ), correlatedViewMaskCount( static_cast<uint32_t>( correlatedViewMasks_.size() ) ), pCorrelatedViewMasks( correlatedViewMasks_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
RenderPassCreateInfo2 & operator=( RenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
RenderPassCreateInfo2 & operator=( VkRenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setFlags( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
{
attachmentCount = attachmentCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setPAttachments( const VULKAN_HPP_NAMESPACE::AttachmentDescription2 * pAttachments_ ) VULKAN_HPP_NOEXCEPT
{
pAttachments = pAttachments_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
RenderPassCreateInfo2 & setAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentDescription2> const & attachments_ ) VULKAN_HPP_NOEXCEPT
{
attachmentCount = static_cast<uint32_t>( attachments_.size() );
pAttachments = attachments_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT
{
subpassCount = subpassCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setPSubpasses( const VULKAN_HPP_NAMESPACE::SubpassDescription2 * pSubpasses_ ) VULKAN_HPP_NOEXCEPT
{
pSubpasses = pSubpasses_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
RenderPassCreateInfo2 & setSubpasses( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDescription2> const & subpasses_ ) VULKAN_HPP_NOEXCEPT
{
subpassCount = static_cast<uint32_t>( subpasses_.size() );
pSubpasses = subpasses_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT
{
dependencyCount = dependencyCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setPDependencies( const VULKAN_HPP_NAMESPACE::SubpassDependency2 * pDependencies_ ) VULKAN_HPP_NOEXCEPT
{
pDependencies = pDependencies_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
RenderPassCreateInfo2 & setDependencies( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDependency2> const & dependencies_ ) VULKAN_HPP_NOEXCEPT
{
dependencyCount = static_cast<uint32_t>( dependencies_.size() );
pDependencies = dependencies_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setCorrelatedViewMaskCount( uint32_t correlatedViewMaskCount_ ) VULKAN_HPP_NOEXCEPT
{
correlatedViewMaskCount = correlatedViewMaskCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setPCorrelatedViewMasks( const uint32_t * pCorrelatedViewMasks_ ) VULKAN_HPP_NOEXCEPT
{
pCorrelatedViewMasks = pCorrelatedViewMasks_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
RenderPassCreateInfo2 & setCorrelatedViewMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & correlatedViewMasks_ ) VULKAN_HPP_NOEXCEPT
{
correlatedViewMaskCount = static_cast<uint32_t>( correlatedViewMasks_.size() );
pCorrelatedViewMasks = correlatedViewMasks_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkRenderPassCreateInfo2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRenderPassCreateInfo2*>( this );
}
operator VkRenderPassCreateInfo2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkRenderPassCreateInfo2*>( this );
}
operator VkRenderPassCreateInfo2 const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkRenderPassCreateInfo2*>( this );
}
operator VkRenderPassCreateInfo2 *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkRenderPassCreateInfo2*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::RenderPassCreateFlags const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::AttachmentDescription2 * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::SubpassDescription2 * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::SubpassDependency2 * const &, uint32_t const &, const uint32_t * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, attachmentCount, pAttachments, subpassCount, pSubpasses, dependencyCount, pDependencies, correlatedViewMaskCount, pCorrelatedViewMasks );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( RenderPassCreateInfo2 const & ) const = default;
#else
bool operator==( RenderPassCreateInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( attachmentCount == rhs.attachmentCount )
&& ( pAttachments == rhs.pAttachments )
&& ( subpassCount == rhs.subpassCount )
&& ( pSubpasses == rhs.pSubpasses )
&& ( dependencyCount == rhs.dependencyCount )
&& ( pDependencies == rhs.pDependencies )
&& ( correlatedViewMaskCount == rhs.correlatedViewMaskCount )
&& ( pCorrelatedViewMasks == rhs.pCorrelatedViewMasks );
#endif
}
bool operator!=( RenderPassCreateInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreateInfo2;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags = {};
uint32_t attachmentCount = {};
const VULKAN_HPP_NAMESPACE::AttachmentDescription2 * pAttachments = {};
uint32_t subpassCount = {};
const VULKAN_HPP_NAMESPACE::SubpassDescription2 * pSubpasses = {};
uint32_t dependencyCount = {};
const VULKAN_HPP_NAMESPACE::SubpassDependency2 * pDependencies = {};
uint32_t correlatedViewMaskCount = {};
const uint32_t * pCorrelatedViewMasks = {};
};
template <>
struct CppType<StructureType, StructureType::eRenderPassCreateInfo2>
{
using Type = RenderPassCreateInfo2;
};
using RenderPassCreateInfo2KHR = RenderPassCreateInfo2;
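  // Usage sketch (illustrative only; `attachments`, `subpasses`, and `dependencies` are assumed
  // std::vector variables and `device` a valid vk::Device): the enhanced-mode constructor fills
  // all count/pointer pairs in one call, and pairs with vk::Device::createRenderPass2, which
  // returns a vk::RenderPass under the default exception-based configuration.
  //   vk::RenderPassCreateInfo2 createInfo( {}, attachments, subpasses, dependencies );
  //   vk::RenderPass            renderPass = device.createRenderPass2( createInfo );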
// wrapper struct for struct VkRenderPassCreationControlEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderPassCreationControlEXT.html
struct RenderPassCreationControlEXT
{
using NativeType = VkRenderPassCreationControlEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassCreationControlEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR RenderPassCreationControlEXT(VULKAN_HPP_NAMESPACE::Bool32 disallowMerging_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, disallowMerging{ disallowMerging_ }
{}
VULKAN_HPP_CONSTEXPR RenderPassCreationControlEXT( RenderPassCreationControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
RenderPassCreationControlEXT( VkRenderPassCreationControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: RenderPassCreationControlEXT( *reinterpret_cast<RenderPassCreationControlEXT const *>( &rhs ) )
{}
RenderPassCreationControlEXT & operator=( RenderPassCreationControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
RenderPassCreationControlEXT & operator=( VkRenderPassCreationControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassCreationControlEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 RenderPassCreationControlEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderPassCreationControlEXT & setDisallowMerging( VULKAN_HPP_NAMESPACE::Bool32 disallowMerging_ ) VULKAN_HPP_NOEXCEPT
{
disallowMerging = disallowMerging_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkRenderPassCreationControlEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRenderPassCreationControlEXT*>( this );
}
operator VkRenderPassCreationControlEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkRenderPassCreationControlEXT*>( this );
}
operator VkRenderPassCreationControlEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkRenderPassCreationControlEXT*>( this );
}
operator VkRenderPassCreationControlEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkRenderPassCreationControlEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, disallowMerging );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( RenderPassCreationControlEXT const & ) const = default;
#else
bool operator==( RenderPassCreationControlEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( disallowMerging == rhs.disallowMerging );
#endif
}
bool operator!=( RenderPassCreationControlEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreationControlEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 disallowMerging = {};
};
template <>
struct CppType<StructureType, StructureType::eRenderPassCreationControlEXT>
{
using Type = RenderPassCreationControlEXT;
};
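  // Usage sketch (illustrative only, VK_EXT_subpass_merge_feedback): disallow subpass merging
  // for a whole render pass by chaining this struct into a vk::RenderPassCreateInfo2 via
  // vk::StructureChain (it can likewise extend an individual vk::SubpassDescription2).
  //   vk::StructureChain<vk::RenderPassCreateInfo2, vk::RenderPassCreationControlEXT> chain;
  //   chain.get<vk::RenderPassCreationControlEXT>().setDisallowMerging( VK_TRUE );
  //   // then create the render pass from chain.get<vk::RenderPassCreateInfo2>()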
// wrapper struct for struct VkRenderPassCreationFeedbackInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderPassCreationFeedbackInfoEXT.html
struct RenderPassCreationFeedbackInfoEXT
{
using NativeType = VkRenderPassCreationFeedbackInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR RenderPassCreationFeedbackInfoEXT(uint32_t postMergeSubpassCount_ = {}) VULKAN_HPP_NOEXCEPT
: postMergeSubpassCount{ postMergeSubpassCount_ }
{}
VULKAN_HPP_CONSTEXPR RenderPassCreationFeedbackInfoEXT( RenderPassCreationFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
RenderPassCreationFeedbackInfoEXT( VkRenderPassCreationFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: RenderPassCreationFeedbackInfoEXT( *reinterpret_cast<RenderPassCreationFeedbackInfoEXT const *>( &rhs ) )
{}
RenderPassCreationFeedbackInfoEXT & operator=( RenderPassCreationFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
RenderPassCreationFeedbackInfoEXT & operator=( VkRenderPassCreationFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackInfoEXT const *>( &rhs );
return *this;
}
operator VkRenderPassCreationFeedbackInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRenderPassCreationFeedbackInfoEXT*>( this );
}
operator VkRenderPassCreationFeedbackInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkRenderPassCreationFeedbackInfoEXT*>( this );
}
operator VkRenderPassCreationFeedbackInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkRenderPassCreationFeedbackInfoEXT*>( this );
}
operator VkRenderPassCreationFeedbackInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkRenderPassCreationFeedbackInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( postMergeSubpassCount );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( RenderPassCreationFeedbackInfoEXT const & ) const = default;
#else
bool operator==( RenderPassCreationFeedbackInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( postMergeSubpassCount == rhs.postMergeSubpassCount );
#endif
}
bool operator!=( RenderPassCreationFeedbackInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
uint32_t postMergeSubpassCount = {};
};
// wrapper struct for struct VkRenderPassCreationFeedbackCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderPassCreationFeedbackCreateInfoEXT.html
struct RenderPassCreationFeedbackCreateInfoEXT
{
using NativeType = VkRenderPassCreationFeedbackCreateInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassCreationFeedbackCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR RenderPassCreationFeedbackCreateInfoEXT(VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackInfoEXT * pRenderPassFeedback_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pRenderPassFeedback{ pRenderPassFeedback_ }
{}
VULKAN_HPP_CONSTEXPR RenderPassCreationFeedbackCreateInfoEXT( RenderPassCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
RenderPassCreationFeedbackCreateInfoEXT( VkRenderPassCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: RenderPassCreationFeedbackCreateInfoEXT( *reinterpret_cast<RenderPassCreationFeedbackCreateInfoEXT const *>( &rhs ) )
{}
RenderPassCreationFeedbackCreateInfoEXT & operator=( RenderPassCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
RenderPassCreationFeedbackCreateInfoEXT & operator=( VkRenderPassCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackCreateInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 RenderPassCreationFeedbackCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderPassCreationFeedbackCreateInfoEXT & setPRenderPassFeedback( VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackInfoEXT * pRenderPassFeedback_ ) VULKAN_HPP_NOEXCEPT
{
pRenderPassFeedback = pRenderPassFeedback_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkRenderPassCreationFeedbackCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRenderPassCreationFeedbackCreateInfoEXT*>( this );
}
operator VkRenderPassCreationFeedbackCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkRenderPassCreationFeedbackCreateInfoEXT*>( this );
}
operator VkRenderPassCreationFeedbackCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkRenderPassCreationFeedbackCreateInfoEXT*>( this );
}
operator VkRenderPassCreationFeedbackCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkRenderPassCreationFeedbackCreateInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackInfoEXT * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pRenderPassFeedback );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( RenderPassCreationFeedbackCreateInfoEXT const & ) const = default;
#else
bool operator==( RenderPassCreationFeedbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pRenderPassFeedback == rhs.pRenderPassFeedback );
#endif
}
bool operator!=( RenderPassCreationFeedbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreationFeedbackCreateInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackInfoEXT * pRenderPassFeedback = {};
};
template <>
struct CppType<StructureType, StructureType::eRenderPassCreationFeedbackCreateInfoEXT>
{
using Type = RenderPassCreationFeedbackCreateInfoEXT;
};
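  // Usage sketch (illustrative only): request creation feedback; the implementation writes
  // postMergeSubpassCount into the caller-provided feedback struct during render pass creation,
  // so the pointed-to object must stay alive across the create call.
  //   vk::RenderPassCreationFeedbackInfoEXT       feedback;
  //   vk::RenderPassCreationFeedbackCreateInfoEXT feedbackInfo( &feedback );
  //   // chain feedbackInfo into the pNext of a vk::RenderPassCreateInfo2 before creation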
// wrapper struct for struct VkRenderPassFragmentDensityMapCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderPassFragmentDensityMapCreateInfoEXT.html
struct RenderPassFragmentDensityMapCreateInfoEXT
{
using NativeType = VkRenderPassFragmentDensityMapCreateInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassFragmentDensityMapCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR RenderPassFragmentDensityMapCreateInfoEXT(VULKAN_HPP_NAMESPACE::AttachmentReference fragmentDensityMapAttachment_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, fragmentDensityMapAttachment{ fragmentDensityMapAttachment_ }
{}
VULKAN_HPP_CONSTEXPR RenderPassFragmentDensityMapCreateInfoEXT( RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
RenderPassFragmentDensityMapCreateInfoEXT( VkRenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: RenderPassFragmentDensityMapCreateInfoEXT( *reinterpret_cast<RenderPassFragmentDensityMapCreateInfoEXT const *>( &rhs ) )
{}
RenderPassFragmentDensityMapCreateInfoEXT & operator=( RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
RenderPassFragmentDensityMapCreateInfoEXT & operator=( VkRenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 RenderPassFragmentDensityMapCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderPassFragmentDensityMapCreateInfoEXT & setFragmentDensityMapAttachment( VULKAN_HPP_NAMESPACE::AttachmentReference const & fragmentDensityMapAttachment_ ) VULKAN_HPP_NOEXCEPT
{
fragmentDensityMapAttachment = fragmentDensityMapAttachment_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkRenderPassFragmentDensityMapCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRenderPassFragmentDensityMapCreateInfoEXT*>( this );
}
operator VkRenderPassFragmentDensityMapCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkRenderPassFragmentDensityMapCreateInfoEXT*>( this );
}
operator VkRenderPassFragmentDensityMapCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkRenderPassFragmentDensityMapCreateInfoEXT*>( this );
}
operator VkRenderPassFragmentDensityMapCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkRenderPassFragmentDensityMapCreateInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::AttachmentReference const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, fragmentDensityMapAttachment );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( RenderPassFragmentDensityMapCreateInfoEXT const & ) const = default;
#else
bool operator==( RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( fragmentDensityMapAttachment == rhs.fragmentDensityMapAttachment );
#endif
}
bool operator!=( RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassFragmentDensityMapCreateInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::AttachmentReference fragmentDensityMapAttachment = {};
};
template <>
struct CppType<StructureType, StructureType::eRenderPassFragmentDensityMapCreateInfoEXT>
{
using Type = RenderPassFragmentDensityMapCreateInfoEXT;
};
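  // Usage sketch (illustrative only): note the member is the Vulkan 1.0 vk::AttachmentReference,
  // not vk::AttachmentReference2, even when this struct is chained into a render pass create info.
  //   vk::AttachmentReference                       fdmRef{ 1, vk::ImageLayout::eFragmentDensityMapOptimalEXT };
  //   vk::RenderPassFragmentDensityMapCreateInfoEXT fdmInfo( fdmRef );
  //   // chain fdmInfo into the pNext of the render pass create info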
// wrapper struct for struct VkRenderPassFragmentDensityMapOffsetEndInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderPassFragmentDensityMapOffsetEndInfoEXT.html
struct RenderPassFragmentDensityMapOffsetEndInfoEXT
{
using NativeType = VkRenderPassFragmentDensityMapOffsetEndInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassFragmentDensityMapOffsetEndInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR RenderPassFragmentDensityMapOffsetEndInfoEXT(uint32_t fragmentDensityOffsetCount_ = {}, const VULKAN_HPP_NAMESPACE::Offset2D * pFragmentDensityOffsets_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, fragmentDensityOffsetCount{ fragmentDensityOffsetCount_ }, pFragmentDensityOffsets{ pFragmentDensityOffsets_ }
{}
VULKAN_HPP_CONSTEXPR RenderPassFragmentDensityMapOffsetEndInfoEXT( RenderPassFragmentDensityMapOffsetEndInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
RenderPassFragmentDensityMapOffsetEndInfoEXT( VkRenderPassFragmentDensityMapOffsetEndInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: RenderPassFragmentDensityMapOffsetEndInfoEXT( *reinterpret_cast<RenderPassFragmentDensityMapOffsetEndInfoEXT const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
RenderPassFragmentDensityMapOffsetEndInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Offset2D> const & fragmentDensityOffsets_, const void * pNext_ = nullptr )
: pNext( pNext_ ), fragmentDensityOffsetCount( static_cast<uint32_t>( fragmentDensityOffsets_.size() ) ), pFragmentDensityOffsets( fragmentDensityOffsets_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
RenderPassFragmentDensityMapOffsetEndInfoEXT & operator=( RenderPassFragmentDensityMapOffsetEndInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
RenderPassFragmentDensityMapOffsetEndInfoEXT & operator=( VkRenderPassFragmentDensityMapOffsetEndInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapOffsetEndInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 RenderPassFragmentDensityMapOffsetEndInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderPassFragmentDensityMapOffsetEndInfoEXT & setFragmentDensityOffsetCount( uint32_t fragmentDensityOffsetCount_ ) VULKAN_HPP_NOEXCEPT
{
fragmentDensityOffsetCount = fragmentDensityOffsetCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderPassFragmentDensityMapOffsetEndInfoEXT & setPFragmentDensityOffsets( const VULKAN_HPP_NAMESPACE::Offset2D * pFragmentDensityOffsets_ ) VULKAN_HPP_NOEXCEPT
{
pFragmentDensityOffsets = pFragmentDensityOffsets_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
RenderPassFragmentDensityMapOffsetEndInfoEXT & setFragmentDensityOffsets( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Offset2D> const & fragmentDensityOffsets_ ) VULKAN_HPP_NOEXCEPT
{
fragmentDensityOffsetCount = static_cast<uint32_t>( fragmentDensityOffsets_.size() );
pFragmentDensityOffsets = fragmentDensityOffsets_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkRenderPassFragmentDensityMapOffsetEndInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRenderPassFragmentDensityMapOffsetEndInfoEXT*>( this );
}
operator VkRenderPassFragmentDensityMapOffsetEndInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkRenderPassFragmentDensityMapOffsetEndInfoEXT*>( this );
}
operator VkRenderPassFragmentDensityMapOffsetEndInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkRenderPassFragmentDensityMapOffsetEndInfoEXT*>( this );
}
operator VkRenderPassFragmentDensityMapOffsetEndInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkRenderPassFragmentDensityMapOffsetEndInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Offset2D * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, fragmentDensityOffsetCount, pFragmentDensityOffsets );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( RenderPassFragmentDensityMapOffsetEndInfoEXT const & ) const = default;
#else
bool operator==( RenderPassFragmentDensityMapOffsetEndInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( fragmentDensityOffsetCount == rhs.fragmentDensityOffsetCount )
&& ( pFragmentDensityOffsets == rhs.pFragmentDensityOffsets );
#endif
}
bool operator!=( RenderPassFragmentDensityMapOffsetEndInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassFragmentDensityMapOffsetEndInfoEXT;
const void * pNext = {};
uint32_t fragmentDensityOffsetCount = {};
const VULKAN_HPP_NAMESPACE::Offset2D * pFragmentDensityOffsets = {};
};
template <>
struct CppType<StructureType, StructureType::eRenderPassFragmentDensityMapOffsetEndInfoEXT>
{
using Type = RenderPassFragmentDensityMapOffsetEndInfoEXT;
};
using SubpassFragmentDensityMapOffsetEndInfoQCOM = RenderPassFragmentDensityMapOffsetEndInfoEXT;
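  // Usage sketch (illustrative only; `offsets` is an assumed std::vector<vk::Offset2D>, and
  // validity depends on the fragment-density-map-offset extension being enabled): chain into a
  // vk::SubpassEndInfo so the offsets take effect when the subpass ends.
  //   vk::RenderPassFragmentDensityMapOffsetEndInfoEXT offsetEnd( offsets );
  //   vk::SubpassEndInfo                               endInfo;
  //   endInfo.setPNext( &offsetEnd );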
// wrapper struct for struct VkRenderPassInputAttachmentAspectCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderPassInputAttachmentAspectCreateInfo.html
struct RenderPassInputAttachmentAspectCreateInfo
{
using NativeType = VkRenderPassInputAttachmentAspectCreateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassInputAttachmentAspectCreateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR RenderPassInputAttachmentAspectCreateInfo(uint32_t aspectReferenceCount_ = {}, const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference * pAspectReferences_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, aspectReferenceCount{ aspectReferenceCount_ }, pAspectReferences{ pAspectReferences_ }
{}
VULKAN_HPP_CONSTEXPR RenderPassInputAttachmentAspectCreateInfo( RenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
RenderPassInputAttachmentAspectCreateInfo( VkRenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: RenderPassInputAttachmentAspectCreateInfo( *reinterpret_cast<RenderPassInputAttachmentAspectCreateInfo const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
RenderPassInputAttachmentAspectCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference> const & aspectReferences_, const void * pNext_ = nullptr )
: pNext( pNext_ ), aspectReferenceCount( static_cast<uint32_t>( aspectReferences_.size() ) ), pAspectReferences( aspectReferences_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
RenderPassInputAttachmentAspectCreateInfo & operator=( RenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
RenderPassInputAttachmentAspectCreateInfo & operator=( VkRenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 RenderPassInputAttachmentAspectCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderPassInputAttachmentAspectCreateInfo & setAspectReferenceCount( uint32_t aspectReferenceCount_ ) VULKAN_HPP_NOEXCEPT
{
aspectReferenceCount = aspectReferenceCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderPassInputAttachmentAspectCreateInfo & setPAspectReferences( const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference * pAspectReferences_ ) VULKAN_HPP_NOEXCEPT
{
pAspectReferences = pAspectReferences_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
RenderPassInputAttachmentAspectCreateInfo & setAspectReferences( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference> const & aspectReferences_ ) VULKAN_HPP_NOEXCEPT
{
aspectReferenceCount = static_cast<uint32_t>( aspectReferences_.size() );
pAspectReferences = aspectReferences_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkRenderPassInputAttachmentAspectCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRenderPassInputAttachmentAspectCreateInfo*>( this );
}
operator VkRenderPassInputAttachmentAspectCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkRenderPassInputAttachmentAspectCreateInfo*>( this );
}
operator VkRenderPassInputAttachmentAspectCreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkRenderPassInputAttachmentAspectCreateInfo*>( this );
}
operator VkRenderPassInputAttachmentAspectCreateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkRenderPassInputAttachmentAspectCreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, aspectReferenceCount, pAspectReferences );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( RenderPassInputAttachmentAspectCreateInfo const & ) const = default;
#else
bool operator==( RenderPassInputAttachmentAspectCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( aspectReferenceCount == rhs.aspectReferenceCount )
&& ( pAspectReferences == rhs.pAspectReferences );
#endif
}
bool operator!=( RenderPassInputAttachmentAspectCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassInputAttachmentAspectCreateInfo;
const void * pNext = {};
uint32_t aspectReferenceCount = {};
const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference * pAspectReferences = {};
};
template <>
struct CppType<StructureType, StructureType::eRenderPassInputAttachmentAspectCreateInfo>
{
using Type = RenderPassInputAttachmentAspectCreateInfo;
};
using RenderPassInputAttachmentAspectCreateInfoKHR = RenderPassInputAttachmentAspectCreateInfo;
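  // Usage sketch (illustrative only): this struct extends the Vulkan 1.0 vk::RenderPassCreateInfo;
  // with render pass 2, the aspect mask lives in vk::AttachmentReference2 instead.
  //   vk::InputAttachmentAspectReference          aspectRef( 0, 0, vk::ImageAspectFlagBits::eColor );
  //   vk::RenderPassInputAttachmentAspectCreateInfo aspectInfo( aspectRef );
  //   // chain aspectInfo into the pNext of a vk::RenderPassCreateInfo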
// wrapper struct for struct VkRenderPassMultiviewCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderPassMultiviewCreateInfo.html
struct RenderPassMultiviewCreateInfo
{
using NativeType = VkRenderPassMultiviewCreateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassMultiviewCreateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR RenderPassMultiviewCreateInfo(uint32_t subpassCount_ = {}, const uint32_t * pViewMasks_ = {}, uint32_t dependencyCount_ = {}, const int32_t * pViewOffsets_ = {}, uint32_t correlationMaskCount_ = {}, const uint32_t * pCorrelationMasks_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, subpassCount{ subpassCount_ }, pViewMasks{ pViewMasks_ }, dependencyCount{ dependencyCount_ }, pViewOffsets{ pViewOffsets_ }, correlationMaskCount{ correlationMaskCount_ }, pCorrelationMasks{ pCorrelationMasks_ }
{}
VULKAN_HPP_CONSTEXPR RenderPassMultiviewCreateInfo( RenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
RenderPassMultiviewCreateInfo( VkRenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: RenderPassMultiviewCreateInfo( *reinterpret_cast<RenderPassMultiviewCreateInfo const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
RenderPassMultiviewCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & viewMasks_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const int32_t> const & viewOffsets_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & correlationMasks_ = {}, const void * pNext_ = nullptr )
: pNext( pNext_ ), subpassCount( static_cast<uint32_t>( viewMasks_.size() ) ), pViewMasks( viewMasks_.data() ), dependencyCount( static_cast<uint32_t>( viewOffsets_.size() ) ), pViewOffsets( viewOffsets_.data() ), correlationMaskCount( static_cast<uint32_t>( correlationMasks_.size() ) ), pCorrelationMasks( correlationMasks_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
RenderPassMultiviewCreateInfo & operator=( RenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
RenderPassMultiviewCreateInfo & operator=( VkRenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT
{
subpassCount = subpassCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setPViewMasks( const uint32_t * pViewMasks_ ) VULKAN_HPP_NOEXCEPT
{
pViewMasks = pViewMasks_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
RenderPassMultiviewCreateInfo & setViewMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & viewMasks_ ) VULKAN_HPP_NOEXCEPT
{
subpassCount = static_cast<uint32_t>( viewMasks_.size() );
pViewMasks = viewMasks_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT
{
dependencyCount = dependencyCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setPViewOffsets( const int32_t * pViewOffsets_ ) VULKAN_HPP_NOEXCEPT
{
pViewOffsets = pViewOffsets_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
RenderPassMultiviewCreateInfo & setViewOffsets( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const int32_t> const & viewOffsets_ ) VULKAN_HPP_NOEXCEPT
{
dependencyCount = static_cast<uint32_t>( viewOffsets_.size() );
pViewOffsets = viewOffsets_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setCorrelationMaskCount( uint32_t correlationMaskCount_ ) VULKAN_HPP_NOEXCEPT
{
correlationMaskCount = correlationMaskCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setPCorrelationMasks( const uint32_t * pCorrelationMasks_ ) VULKAN_HPP_NOEXCEPT
{
pCorrelationMasks = pCorrelationMasks_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
RenderPassMultiviewCreateInfo & setCorrelationMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & correlationMasks_ ) VULKAN_HPP_NOEXCEPT
{
correlationMaskCount = static_cast<uint32_t>( correlationMasks_.size() );
pCorrelationMasks = correlationMasks_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkRenderPassMultiviewCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRenderPassMultiviewCreateInfo*>( this );
}
operator VkRenderPassMultiviewCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkRenderPassMultiviewCreateInfo*>( this );
}
operator VkRenderPassMultiviewCreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkRenderPassMultiviewCreateInfo*>( this );
}
operator VkRenderPassMultiviewCreateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkRenderPassMultiviewCreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const uint32_t * const &, uint32_t const &, const int32_t * const &, uint32_t const &, const uint32_t * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, subpassCount, pViewMasks, dependencyCount, pViewOffsets, correlationMaskCount, pCorrelationMasks );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( RenderPassMultiviewCreateInfo const & ) const = default;
#else
bool operator==( RenderPassMultiviewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( subpassCount == rhs.subpassCount )
&& ( pViewMasks == rhs.pViewMasks )
&& ( dependencyCount == rhs.dependencyCount )
&& ( pViewOffsets == rhs.pViewOffsets )
&& ( correlationMaskCount == rhs.correlationMaskCount )
&& ( pCorrelationMasks == rhs.pCorrelationMasks );
#endif
}
bool operator!=( RenderPassMultiviewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassMultiviewCreateInfo;
const void * pNext = {};
uint32_t subpassCount = {};
const uint32_t * pViewMasks = {};
uint32_t dependencyCount = {};
const int32_t * pViewOffsets = {};
uint32_t correlationMaskCount = {};
const uint32_t * pCorrelationMasks = {};
};
template <>
struct CppType<StructureType, StructureType::eRenderPassMultiviewCreateInfo>
{
using Type = RenderPassMultiviewCreateInfo;
};
using RenderPassMultiviewCreateInfoKHR = RenderPassMultiviewCreateInfo;
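  // Usage sketch (illustrative only): each view mask is a bitfield of the views its subpass
  // renders to; this struct extends the Vulkan 1.0 vk::RenderPassCreateInfo, while render pass 2
  // carries the mask in vk::SubpassDescription2::viewMask instead.
  //   uint32_t                          viewMask = 0b11;  // one subpass rendering to views 0 and 1
  //   vk::RenderPassMultiviewCreateInfo multiviewInfo( viewMask );
  //   // chain multiviewInfo into the pNext of a vk::RenderPassCreateInfo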
// wrapper struct for struct VkSubpassSampleLocationsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSubpassSampleLocationsEXT.html
struct SubpassSampleLocationsEXT
{
using NativeType = VkSubpassSampleLocationsEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SubpassSampleLocationsEXT(uint32_t subpassIndex_ = {}, VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {}) VULKAN_HPP_NOEXCEPT
: subpassIndex{ subpassIndex_ }, sampleLocationsInfo{ sampleLocationsInfo_ }
{}
VULKAN_HPP_CONSTEXPR SubpassSampleLocationsEXT( SubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SubpassSampleLocationsEXT( VkSubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: SubpassSampleLocationsEXT( *reinterpret_cast<SubpassSampleLocationsEXT const *>( &rhs ) )
{}
SubpassSampleLocationsEXT & operator=( SubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SubpassSampleLocationsEXT & operator=( VkSubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 SubpassSampleLocationsEXT & setSubpassIndex( uint32_t subpassIndex_ ) VULKAN_HPP_NOEXCEPT
{
subpassIndex = subpassIndex_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SubpassSampleLocationsEXT & setSampleLocationsInfo( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT
{
sampleLocationsInfo = sampleLocationsInfo_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkSubpassSampleLocationsEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSubpassSampleLocationsEXT*>( this );
}
operator VkSubpassSampleLocationsEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSubpassSampleLocationsEXT*>( this );
}
operator VkSubpassSampleLocationsEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSubpassSampleLocationsEXT*>( this );
}
operator VkSubpassSampleLocationsEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSubpassSampleLocationsEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<uint32_t const &, VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( subpassIndex, sampleLocationsInfo );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SubpassSampleLocationsEXT const & ) const = default;
#else
bool operator==( SubpassSampleLocationsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( subpassIndex == rhs.subpassIndex )
&& ( sampleLocationsInfo == rhs.sampleLocationsInfo );
#endif
}
bool operator!=( SubpassSampleLocationsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
uint32_t subpassIndex = {};
VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo = {};
};
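  // Usage sketch (illustrative only; `sampleLocationsInfo` is an assumed, fully populated
  // vk::SampleLocationsInfoEXT): pairs a subpass index with the sample locations that take
  // effect when that subpass ends.
  //   vk::SubpassSampleLocationsEXT subpassLocations( 0, sampleLocationsInfo );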
// wrapper struct for struct VkRenderPassSampleLocationsBeginInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderPassSampleLocationsBeginInfoEXT.html
struct RenderPassSampleLocationsBeginInfoEXT
{
using NativeType = VkRenderPassSampleLocationsBeginInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassSampleLocationsBeginInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR RenderPassSampleLocationsBeginInfoEXT(uint32_t attachmentInitialSampleLocationsCount_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT * pAttachmentInitialSampleLocations_ = {}, uint32_t postSubpassSampleLocationsCount_ = {}, const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT * pPostSubpassSampleLocations_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, attachmentInitialSampleLocationsCount{ attachmentInitialSampleLocationsCount_ }, pAttachmentInitialSampleLocations{ pAttachmentInitialSampleLocations_ }, postSubpassSampleLocationsCount{ postSubpassSampleLocationsCount_ }, pPostSubpassSampleLocations{ pPostSubpassSampleLocations_ }
{}
VULKAN_HPP_CONSTEXPR RenderPassSampleLocationsBeginInfoEXT( RenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
RenderPassSampleLocationsBeginInfoEXT( VkRenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: RenderPassSampleLocationsBeginInfoEXT( *reinterpret_cast<RenderPassSampleLocationsBeginInfoEXT const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
RenderPassSampleLocationsBeginInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT> const & attachmentInitialSampleLocations_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT> const & postSubpassSampleLocations_ = {}, const void * pNext_ = nullptr )
: pNext( pNext_ ), attachmentInitialSampleLocationsCount( static_cast<uint32_t>( attachmentInitialSampleLocations_.size() ) ), pAttachmentInitialSampleLocations( attachmentInitialSampleLocations_.data() ), postSubpassSampleLocationsCount( static_cast<uint32_t>( postSubpassSampleLocations_.size() ) ), pPostSubpassSampleLocations( postSubpassSampleLocations_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
RenderPassSampleLocationsBeginInfoEXT & operator=( RenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
RenderPassSampleLocationsBeginInfoEXT & operator=( VkRenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassSampleLocationsBeginInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT & setAttachmentInitialSampleLocationsCount( uint32_t attachmentInitialSampleLocationsCount_ ) VULKAN_HPP_NOEXCEPT
{
attachmentInitialSampleLocationsCount = attachmentInitialSampleLocationsCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT & setPAttachmentInitialSampleLocations( const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT * pAttachmentInitialSampleLocations_ ) VULKAN_HPP_NOEXCEPT
{
pAttachmentInitialSampleLocations = pAttachmentInitialSampleLocations_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
RenderPassSampleLocationsBeginInfoEXT & setAttachmentInitialSampleLocations( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT> const & attachmentInitialSampleLocations_ ) VULKAN_HPP_NOEXCEPT
{
attachmentInitialSampleLocationsCount = static_cast<uint32_t>( attachmentInitialSampleLocations_.size() );
pAttachmentInitialSampleLocations = attachmentInitialSampleLocations_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT & setPostSubpassSampleLocationsCount( uint32_t postSubpassSampleLocationsCount_ ) VULKAN_HPP_NOEXCEPT
{
postSubpassSampleLocationsCount = postSubpassSampleLocationsCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT & setPPostSubpassSampleLocations( const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT * pPostSubpassSampleLocations_ ) VULKAN_HPP_NOEXCEPT
{
pPostSubpassSampleLocations = pPostSubpassSampleLocations_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
RenderPassSampleLocationsBeginInfoEXT & setPostSubpassSampleLocations( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT> const & postSubpassSampleLocations_ ) VULKAN_HPP_NOEXCEPT
{
postSubpassSampleLocationsCount = static_cast<uint32_t>( postSubpassSampleLocations_.size() );
pPostSubpassSampleLocations = postSubpassSampleLocations_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkRenderPassSampleLocationsBeginInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRenderPassSampleLocationsBeginInfoEXT*>( this );
}
operator VkRenderPassSampleLocationsBeginInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkRenderPassSampleLocationsBeginInfoEXT*>( this );
}
operator VkRenderPassSampleLocationsBeginInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkRenderPassSampleLocationsBeginInfoEXT*>( this );
}
operator VkRenderPassSampleLocationsBeginInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkRenderPassSampleLocationsBeginInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, attachmentInitialSampleLocationsCount, pAttachmentInitialSampleLocations, postSubpassSampleLocationsCount, pPostSubpassSampleLocations );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( RenderPassSampleLocationsBeginInfoEXT const & ) const = default;
#else
bool operator==( RenderPassSampleLocationsBeginInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( attachmentInitialSampleLocationsCount == rhs.attachmentInitialSampleLocationsCount )
&& ( pAttachmentInitialSampleLocations == rhs.pAttachmentInitialSampleLocations )
&& ( postSubpassSampleLocationsCount == rhs.postSubpassSampleLocationsCount )
&& ( pPostSubpassSampleLocations == rhs.pPostSubpassSampleLocations );
#endif
}
bool operator!=( RenderPassSampleLocationsBeginInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassSampleLocationsBeginInfoEXT;
const void * pNext = {};
uint32_t attachmentInitialSampleLocationsCount = {};
const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT * pAttachmentInitialSampleLocations = {};
uint32_t postSubpassSampleLocationsCount = {};
const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT * pPostSubpassSampleLocations = {};
};
template <>
struct CppType<StructureType, StructureType::eRenderPassSampleLocationsBeginInfoEXT>
{
using Type = RenderPassSampleLocationsBeginInfoEXT;
};
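// Illustrative usage sketch (editorial note, not generated from the registry): with
// VK_EXT_sample_locations this struct is chained into vk::RenderPassBeginInfo::pNext
// ("vk" being the default VULKAN_HPP_NAMESPACE). All variable names below, such as
// sampleLocationsInfo and renderPassBeginInfo, are hypothetical placeholders.
//
//   vk::AttachmentSampleLocationsEXT attachmentLocations[] = { { 0, sampleLocationsInfo } };
//   vk::SubpassSampleLocationsEXT    subpassLocations[]    = { { 0, sampleLocationsInfo } };
//   vk::RenderPassSampleLocationsBeginInfoEXT sampleLocationsBegin( attachmentLocations, subpassLocations );
//   renderPassBeginInfo.setPNext( &sampleLocationsBegin );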
// wrapper struct for struct VkRenderPassStripeInfoARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderPassStripeInfoARM.html
struct RenderPassStripeInfoARM
{
using NativeType = VkRenderPassStripeInfoARM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassStripeInfoARM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR RenderPassStripeInfoARM(VULKAN_HPP_NAMESPACE::Rect2D stripeArea_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, stripeArea{ stripeArea_ }
{}
VULKAN_HPP_CONSTEXPR RenderPassStripeInfoARM( RenderPassStripeInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
RenderPassStripeInfoARM( VkRenderPassStripeInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
: RenderPassStripeInfoARM( *reinterpret_cast<RenderPassStripeInfoARM const *>( &rhs ) )
{}
RenderPassStripeInfoARM & operator=( RenderPassStripeInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
RenderPassStripeInfoARM & operator=( VkRenderPassStripeInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassStripeInfoARM const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 RenderPassStripeInfoARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderPassStripeInfoARM & setStripeArea( VULKAN_HPP_NAMESPACE::Rect2D const & stripeArea_ ) VULKAN_HPP_NOEXCEPT
{
stripeArea = stripeArea_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkRenderPassStripeInfoARM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRenderPassStripeInfoARM*>( this );
}
operator VkRenderPassStripeInfoARM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkRenderPassStripeInfoARM*>( this );
}
operator VkRenderPassStripeInfoARM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkRenderPassStripeInfoARM*>( this );
}
operator VkRenderPassStripeInfoARM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkRenderPassStripeInfoARM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Rect2D const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, stripeArea );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( RenderPassStripeInfoARM const & ) const = default;
#else
bool operator==( RenderPassStripeInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( stripeArea == rhs.stripeArea );
#endif
}
bool operator!=( RenderPassStripeInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassStripeInfoARM;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Rect2D stripeArea = {};
};
template <>
struct CppType<StructureType, StructureType::eRenderPassStripeInfoARM>
{
using Type = RenderPassStripeInfoARM;
};
// wrapper struct for struct VkRenderPassStripeBeginInfoARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderPassStripeBeginInfoARM.html
struct RenderPassStripeBeginInfoARM
{
using NativeType = VkRenderPassStripeBeginInfoARM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassStripeBeginInfoARM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR RenderPassStripeBeginInfoARM(uint32_t stripeInfoCount_ = {}, const VULKAN_HPP_NAMESPACE::RenderPassStripeInfoARM * pStripeInfos_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, stripeInfoCount{ stripeInfoCount_ }, pStripeInfos{ pStripeInfos_ }
{}
VULKAN_HPP_CONSTEXPR RenderPassStripeBeginInfoARM( RenderPassStripeBeginInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
RenderPassStripeBeginInfoARM( VkRenderPassStripeBeginInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
: RenderPassStripeBeginInfoARM( *reinterpret_cast<RenderPassStripeBeginInfoARM const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
RenderPassStripeBeginInfoARM( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RenderPassStripeInfoARM> const & stripeInfos_, const void * pNext_ = nullptr )
: pNext( pNext_ ), stripeInfoCount( static_cast<uint32_t>( stripeInfos_.size() ) ), pStripeInfos( stripeInfos_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
RenderPassStripeBeginInfoARM & operator=( RenderPassStripeBeginInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
RenderPassStripeBeginInfoARM & operator=( VkRenderPassStripeBeginInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassStripeBeginInfoARM const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 RenderPassStripeBeginInfoARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderPassStripeBeginInfoARM & setStripeInfoCount( uint32_t stripeInfoCount_ ) VULKAN_HPP_NOEXCEPT
{
stripeInfoCount = stripeInfoCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderPassStripeBeginInfoARM & setPStripeInfos( const VULKAN_HPP_NAMESPACE::RenderPassStripeInfoARM * pStripeInfos_ ) VULKAN_HPP_NOEXCEPT
{
pStripeInfos = pStripeInfos_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
RenderPassStripeBeginInfoARM & setStripeInfos( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RenderPassStripeInfoARM> const & stripeInfos_ ) VULKAN_HPP_NOEXCEPT
{
stripeInfoCount = static_cast<uint32_t>( stripeInfos_.size() );
pStripeInfos = stripeInfos_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkRenderPassStripeBeginInfoARM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRenderPassStripeBeginInfoARM*>( this );
}
operator VkRenderPassStripeBeginInfoARM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkRenderPassStripeBeginInfoARM*>( this );
}
operator VkRenderPassStripeBeginInfoARM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkRenderPassStripeBeginInfoARM*>( this );
}
operator VkRenderPassStripeBeginInfoARM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkRenderPassStripeBeginInfoARM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::RenderPassStripeInfoARM * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, stripeInfoCount, pStripeInfos );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( RenderPassStripeBeginInfoARM const & ) const = default;
#else
bool operator==( RenderPassStripeBeginInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( stripeInfoCount == rhs.stripeInfoCount )
&& ( pStripeInfos == rhs.pStripeInfos );
#endif
}
bool operator!=( RenderPassStripeBeginInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassStripeBeginInfoARM;
const void * pNext = {};
uint32_t stripeInfoCount = {};
const VULKAN_HPP_NAMESPACE::RenderPassStripeInfoARM * pStripeInfos = {};
};
template <>
struct CppType<StructureType, StructureType::eRenderPassStripeBeginInfoARM>
{
using Type = RenderPassStripeBeginInfoARM;
};
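// Illustrative usage sketch (editorial note, not generated from the registry): with
// VK_ARM_render_pass_striped, each RenderPassStripeInfoARM declares one stripe of the
// render area, and RenderPassStripeBeginInfoARM gathers them for the pNext chain of
// the begin-rendering structure. The stripe extents below are hypothetical.
//
//   vk::RenderPassStripeInfoARM stripes[] = {
//     vk::RenderPassStripeInfoARM( vk::Rect2D( { 0, 0 }, { 512, 512 } ) ),
//     vk::RenderPassStripeInfoARM( vk::Rect2D( { 512, 0 }, { 512, 512 } ) )
//   };
//   vk::RenderPassStripeBeginInfoARM stripeBegin( stripes );  // then chain via pNext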
// wrapper struct for struct VkSemaphoreSubmitInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSemaphoreSubmitInfo.html
struct SemaphoreSubmitInfo
{
using NativeType = VkSemaphoreSubmitInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreSubmitInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SemaphoreSubmitInfo( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {},
  uint64_t value_ = {},
  VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask_ = {},
  uint32_t deviceIndex_ = {},
  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
  : pNext{ pNext_ }, semaphore{ semaphore_ }, value{ value_ }, stageMask{ stageMask_ }, deviceIndex{ deviceIndex_ }
{}
VULKAN_HPP_CONSTEXPR SemaphoreSubmitInfo( SemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SemaphoreSubmitInfo( VkSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: SemaphoreSubmitInfo( *reinterpret_cast<SemaphoreSubmitInfo const *>( &rhs ) )
{}
SemaphoreSubmitInfo & operator=( SemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SemaphoreSubmitInfo & operator=( VkSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfo & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
{
semaphore = semaphore_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfo & setValue( uint64_t value_ ) VULKAN_HPP_NOEXCEPT
{
value = value_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfo & setStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask_ ) VULKAN_HPP_NOEXCEPT
{
stageMask = stageMask_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfo & setDeviceIndex( uint32_t deviceIndex_ ) VULKAN_HPP_NOEXCEPT
{
deviceIndex = deviceIndex_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkSemaphoreSubmitInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSemaphoreSubmitInfo*>( this );
}
operator VkSemaphoreSubmitInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSemaphoreSubmitInfo*>( this );
}
operator VkSemaphoreSubmitInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSemaphoreSubmitInfo*>( this );
}
operator VkSemaphoreSubmitInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSemaphoreSubmitInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Semaphore const &, uint64_t const &, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, semaphore, value, stageMask, deviceIndex );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SemaphoreSubmitInfo const & ) const = default;
#else
bool operator==( SemaphoreSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( semaphore == rhs.semaphore )
&& ( value == rhs.value )
&& ( stageMask == rhs.stageMask )
&& ( deviceIndex == rhs.deviceIndex );
#endif
}
bool operator!=( SemaphoreSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreSubmitInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
uint64_t value = {};
VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask = {};
uint32_t deviceIndex = {};
};
template <>
struct CppType<StructureType, StructureType::eSemaphoreSubmitInfo>
{
using Type = SemaphoreSubmitInfo;
};
using SemaphoreSubmitInfoKHR = SemaphoreSubmitInfo;
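// Illustrative usage sketch (editorial note, not generated from the registry): a
// SemaphoreSubmitInfo describes one wait or signal operation for Queue::submit2 via
// SubmitInfo2. The variables semaphore, cmdBufferInfo and queue are hypothetical.
//
//   vk::SemaphoreSubmitInfo waitInfo{};
//   waitInfo.setSemaphore( semaphore )
//           .setValue( 1 )  // timeline value; ignored for binary semaphores
//           .setStageMask( vk::PipelineStageFlagBits2::eColorAttachmentOutput );
//   vk::SubmitInfo2 submitInfo( {}, waitInfo, cmdBufferInfo );
//   queue.submit2( submitInfo );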
// wrapper struct for struct VkRenderPassStripeSubmitInfoARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderPassStripeSubmitInfoARM.html
struct RenderPassStripeSubmitInfoARM
{
using NativeType = VkRenderPassStripeSubmitInfoARM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassStripeSubmitInfoARM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR RenderPassStripeSubmitInfoARM(uint32_t stripeSemaphoreInfoCount_ = {}, const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pStripeSemaphoreInfos_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, stripeSemaphoreInfoCount{ stripeSemaphoreInfoCount_ }, pStripeSemaphoreInfos{ pStripeSemaphoreInfos_ }
{}
VULKAN_HPP_CONSTEXPR RenderPassStripeSubmitInfoARM( RenderPassStripeSubmitInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
RenderPassStripeSubmitInfoARM( VkRenderPassStripeSubmitInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
: RenderPassStripeSubmitInfoARM( *reinterpret_cast<RenderPassStripeSubmitInfoARM const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
RenderPassStripeSubmitInfoARM( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo> const & stripeSemaphoreInfos_, const void * pNext_ = nullptr )
: pNext( pNext_ ), stripeSemaphoreInfoCount( static_cast<uint32_t>( stripeSemaphoreInfos_.size() ) ), pStripeSemaphoreInfos( stripeSemaphoreInfos_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
RenderPassStripeSubmitInfoARM & operator=( RenderPassStripeSubmitInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
RenderPassStripeSubmitInfoARM & operator=( VkRenderPassStripeSubmitInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassStripeSubmitInfoARM const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 RenderPassStripeSubmitInfoARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderPassStripeSubmitInfoARM & setStripeSemaphoreInfoCount( uint32_t stripeSemaphoreInfoCount_ ) VULKAN_HPP_NOEXCEPT
{
stripeSemaphoreInfoCount = stripeSemaphoreInfoCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderPassStripeSubmitInfoARM & setPStripeSemaphoreInfos( const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pStripeSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT
{
pStripeSemaphoreInfos = pStripeSemaphoreInfos_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
RenderPassStripeSubmitInfoARM & setStripeSemaphoreInfos( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo> const & stripeSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT
{
stripeSemaphoreInfoCount = static_cast<uint32_t>( stripeSemaphoreInfos_.size() );
pStripeSemaphoreInfos = stripeSemaphoreInfos_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkRenderPassStripeSubmitInfoARM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRenderPassStripeSubmitInfoARM*>( this );
}
operator VkRenderPassStripeSubmitInfoARM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkRenderPassStripeSubmitInfoARM*>( this );
}
operator VkRenderPassStripeSubmitInfoARM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkRenderPassStripeSubmitInfoARM*>( this );
}
operator VkRenderPassStripeSubmitInfoARM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkRenderPassStripeSubmitInfoARM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, stripeSemaphoreInfoCount, pStripeSemaphoreInfos );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( RenderPassStripeSubmitInfoARM const & ) const = default;
#else
bool operator==( RenderPassStripeSubmitInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( stripeSemaphoreInfoCount == rhs.stripeSemaphoreInfoCount )
&& ( pStripeSemaphoreInfos == rhs.pStripeSemaphoreInfos );
#endif
}
bool operator!=( RenderPassStripeSubmitInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassStripeSubmitInfoARM;
const void * pNext = {};
uint32_t stripeSemaphoreInfoCount = {};
const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pStripeSemaphoreInfos = {};
};
template <>
struct CppType<StructureType, StructureType::eRenderPassStripeSubmitInfoARM>
{
using Type = RenderPassStripeSubmitInfoARM;
};
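// Illustrative usage sketch (editorial note, not generated from the registry): per
// VK_ARM_render_pass_striped, one SemaphoreSubmitInfo is provided per stripe and is
// signalled as that stripe's work completes; the struct is chained into the pNext of
// the command-buffer submit info. Names below are hypothetical.
//
//   vk::RenderPassStripeSubmitInfoARM stripeSubmit( stripeSemaphoreInfos );
//   cmdBufferInfo.setPNext( &stripeSubmit );  // cmdBufferInfo: vk::CommandBufferSubmitInfo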
// wrapper struct for struct VkRenderPassSubpassFeedbackInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderPassSubpassFeedbackInfoEXT.html
struct RenderPassSubpassFeedbackInfoEXT
{
using NativeType = VkRenderPassSubpassFeedbackInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 RenderPassSubpassFeedbackInfoEXT( VULKAN_HPP_NAMESPACE::SubpassMergeStatusEXT subpassMergeStatus_ = VULKAN_HPP_NAMESPACE::SubpassMergeStatusEXT::eMerged,
  std::array<char, VK_MAX_DESCRIPTION_SIZE> const & description_ = {},
  uint32_t postMergeIndex_ = {} ) VULKAN_HPP_NOEXCEPT
: subpassMergeStatus{ subpassMergeStatus_ }, description{ description_ }, postMergeIndex{ postMergeIndex_ }
{}
VULKAN_HPP_CONSTEXPR_14 RenderPassSubpassFeedbackInfoEXT( RenderPassSubpassFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
RenderPassSubpassFeedbackInfoEXT( VkRenderPassSubpassFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: RenderPassSubpassFeedbackInfoEXT( *reinterpret_cast<RenderPassSubpassFeedbackInfoEXT const *>( &rhs ) )
{}
RenderPassSubpassFeedbackInfoEXT & operator=( RenderPassSubpassFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
RenderPassSubpassFeedbackInfoEXT & operator=( VkRenderPassSubpassFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackInfoEXT const *>( &rhs );
return *this;
}
operator VkRenderPassSubpassFeedbackInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRenderPassSubpassFeedbackInfoEXT*>( this );
}
operator VkRenderPassSubpassFeedbackInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkRenderPassSubpassFeedbackInfoEXT*>( this );
}
operator VkRenderPassSubpassFeedbackInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkRenderPassSubpassFeedbackInfoEXT*>( this );
}
operator VkRenderPassSubpassFeedbackInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkRenderPassSubpassFeedbackInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::SubpassMergeStatusEXT const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( subpassMergeStatus, description, postMergeIndex );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
std::strong_ordering operator<=>( RenderPassSubpassFeedbackInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = subpassMergeStatus <=> rhs.subpassMergeStatus; cmp != 0 ) return cmp;
if ( auto cmp = strcmp( description, rhs.description ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
if ( auto cmp = postMergeIndex <=> rhs.postMergeIndex; cmp != 0 ) return cmp;
return std::strong_ordering::equivalent;
}
#endif
bool operator==( RenderPassSubpassFeedbackInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( subpassMergeStatus == rhs.subpassMergeStatus )
&& ( strcmp( description, rhs.description ) == 0 )
&& ( postMergeIndex == rhs.postMergeIndex );
}
bool operator!=( RenderPassSubpassFeedbackInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
VULKAN_HPP_NAMESPACE::SubpassMergeStatusEXT subpassMergeStatus = VULKAN_HPP_NAMESPACE::SubpassMergeStatusEXT::eMerged;
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
uint32_t postMergeIndex = {};
};
// wrapper struct for struct VkRenderPassSubpassFeedbackCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderPassSubpassFeedbackCreateInfoEXT.html
struct RenderPassSubpassFeedbackCreateInfoEXT
{
using NativeType = VkRenderPassSubpassFeedbackCreateInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassSubpassFeedbackCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 RenderPassSubpassFeedbackCreateInfoEXT(VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackInfoEXT * pSubpassFeedback_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pSubpassFeedback{ pSubpassFeedback_ }
{}
VULKAN_HPP_CONSTEXPR_14 RenderPassSubpassFeedbackCreateInfoEXT( RenderPassSubpassFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
RenderPassSubpassFeedbackCreateInfoEXT( VkRenderPassSubpassFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: RenderPassSubpassFeedbackCreateInfoEXT( *reinterpret_cast<RenderPassSubpassFeedbackCreateInfoEXT const *>( &rhs ) )
{}
RenderPassSubpassFeedbackCreateInfoEXT & operator=( RenderPassSubpassFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
RenderPassSubpassFeedbackCreateInfoEXT & operator=( VkRenderPassSubpassFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackCreateInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 RenderPassSubpassFeedbackCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderPassSubpassFeedbackCreateInfoEXT & setPSubpassFeedback( VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackInfoEXT * pSubpassFeedback_ ) VULKAN_HPP_NOEXCEPT
{
pSubpassFeedback = pSubpassFeedback_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkRenderPassSubpassFeedbackCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRenderPassSubpassFeedbackCreateInfoEXT*>( this );
}
operator VkRenderPassSubpassFeedbackCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkRenderPassSubpassFeedbackCreateInfoEXT*>( this );
}
operator VkRenderPassSubpassFeedbackCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkRenderPassSubpassFeedbackCreateInfoEXT*>( this );
}
operator VkRenderPassSubpassFeedbackCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkRenderPassSubpassFeedbackCreateInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackInfoEXT * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pSubpassFeedback );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( RenderPassSubpassFeedbackCreateInfoEXT const & ) const = default;
#else
bool operator==( RenderPassSubpassFeedbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pSubpassFeedback == rhs.pSubpassFeedback );
#endif
}
bool operator!=( RenderPassSubpassFeedbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassSubpassFeedbackCreateInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackInfoEXT * pSubpassFeedback = {};
};
template <>
struct CppType<StructureType, StructureType::eRenderPassSubpassFeedbackCreateInfoEXT>
{
using Type = RenderPassSubpassFeedbackCreateInfoEXT;
};
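// Illustrative usage sketch (editorial note, not generated from the registry): with
// VK_EXT_subpass_merge_feedback, the create info points at caller-owned feedback
// storage and is chained into a subpass description when the render pass is created;
// the implementation then fills in the feedback. Names below are hypothetical.
//
//   vk::RenderPassSubpassFeedbackInfoEXT       feedback{};
//   vk::RenderPassSubpassFeedbackCreateInfoEXT feedbackCreateInfo( &feedback );
//   subpassDescription.setPNext( &feedbackCreateInfo );  // vk::SubpassDescription2
//   // after render pass creation, inspect feedback.subpassMergeStatus / description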
// wrapper struct for struct VkRenderPassTileShadingCreateInfoQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderPassTileShadingCreateInfoQCOM.html
struct RenderPassTileShadingCreateInfoQCOM
{
using NativeType = VkRenderPassTileShadingCreateInfoQCOM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassTileShadingCreateInfoQCOM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR RenderPassTileShadingCreateInfoQCOM(VULKAN_HPP_NAMESPACE::TileShadingRenderPassFlagsQCOM flags_ = {}, VULKAN_HPP_NAMESPACE::Extent2D tileApronSize_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, tileApronSize{ tileApronSize_ }
{}
VULKAN_HPP_CONSTEXPR RenderPassTileShadingCreateInfoQCOM( RenderPassTileShadingCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
RenderPassTileShadingCreateInfoQCOM( VkRenderPassTileShadingCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
: RenderPassTileShadingCreateInfoQCOM( *reinterpret_cast<RenderPassTileShadingCreateInfoQCOM const *>( &rhs ) )
{}
RenderPassTileShadingCreateInfoQCOM & operator=( RenderPassTileShadingCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
RenderPassTileShadingCreateInfoQCOM & operator=( VkRenderPassTileShadingCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassTileShadingCreateInfoQCOM const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 RenderPassTileShadingCreateInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderPassTileShadingCreateInfoQCOM & setFlags( VULKAN_HPP_NAMESPACE::TileShadingRenderPassFlagsQCOM flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderPassTileShadingCreateInfoQCOM & setTileApronSize( VULKAN_HPP_NAMESPACE::Extent2D const & tileApronSize_ ) VULKAN_HPP_NOEXCEPT
{
tileApronSize = tileApronSize_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkRenderPassTileShadingCreateInfoQCOM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRenderPassTileShadingCreateInfoQCOM*>( this );
}
operator VkRenderPassTileShadingCreateInfoQCOM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkRenderPassTileShadingCreateInfoQCOM*>( this );
}
operator VkRenderPassTileShadingCreateInfoQCOM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkRenderPassTileShadingCreateInfoQCOM*>( this );
}
operator VkRenderPassTileShadingCreateInfoQCOM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkRenderPassTileShadingCreateInfoQCOM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::TileShadingRenderPassFlagsQCOM const &, VULKAN_HPP_NAMESPACE::Extent2D const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, tileApronSize );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( RenderPassTileShadingCreateInfoQCOM const & ) const = default;
#else
bool operator==( RenderPassTileShadingCreateInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( tileApronSize == rhs.tileApronSize );
#endif
}
bool operator!=( RenderPassTileShadingCreateInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassTileShadingCreateInfoQCOM;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::TileShadingRenderPassFlagsQCOM flags = {};
VULKAN_HPP_NAMESPACE::Extent2D tileApronSize = {};
};
template <>
struct CppType<StructureType, StructureType::eRenderPassTileShadingCreateInfoQCOM>
{
using Type = RenderPassTileShadingCreateInfoQCOM;
};
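// Illustrative usage sketch (editorial note, not generated from the registry): with
// VK_QCOM_tile_shading, this struct is chained into the creation / begin-rendering
// info of a render pass to enable tile shading and declare a tile apron. The flag
// name used below is an assumption; check TileShadingRenderPassFlagBitsQCOM for the
// exact enumerants.
//
//   vk::RenderPassTileShadingCreateInfoQCOM tileShadingInfo(
//     vk::TileShadingRenderPassFlagBitsQCOM::eEnable,  // assumed enumerant
//     vk::Extent2D( 1, 1 ) );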
// wrapper struct for struct VkRenderPassTransformBeginInfoQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderPassTransformBeginInfoQCOM.html
struct RenderPassTransformBeginInfoQCOM
{
using NativeType = VkRenderPassTransformBeginInfoQCOM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassTransformBeginInfoQCOM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR RenderPassTransformBeginInfoQCOM(VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, transform{ transform_ }
{}
VULKAN_HPP_CONSTEXPR RenderPassTransformBeginInfoQCOM( RenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
RenderPassTransformBeginInfoQCOM( VkRenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
: RenderPassTransformBeginInfoQCOM( *reinterpret_cast<RenderPassTransformBeginInfoQCOM const *>( &rhs ) )
{}
RenderPassTransformBeginInfoQCOM & operator=( RenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
RenderPassTransformBeginInfoQCOM & operator=( VkRenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassTransformBeginInfoQCOM const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 RenderPassTransformBeginInfoQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderPassTransformBeginInfoQCOM & setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT
{
transform = transform_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkRenderPassTransformBeginInfoQCOM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRenderPassTransformBeginInfoQCOM*>( this );
}
operator VkRenderPassTransformBeginInfoQCOM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkRenderPassTransformBeginInfoQCOM*>( this );
}
operator VkRenderPassTransformBeginInfoQCOM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkRenderPassTransformBeginInfoQCOM*>( this );
}
operator VkRenderPassTransformBeginInfoQCOM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkRenderPassTransformBeginInfoQCOM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, transform );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( RenderPassTransformBeginInfoQCOM const & ) const = default;
#else
bool operator==( RenderPassTransformBeginInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( transform == rhs.transform );
#endif
}
bool operator!=( RenderPassTransformBeginInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassTransformBeginInfoQCOM;
void * pNext = {};
VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
};
template <>
struct CppType<StructureType, StructureType::eRenderPassTransformBeginInfoQCOM>
{
using Type = RenderPassTransformBeginInfoQCOM;
};
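// Illustrative usage sketch (editorial note, not generated from the registry): with
// VK_QCOM_render_pass_transform, the transform is chained into
// vk::RenderPassBeginInfo::pNext and is expected to match the swapchain's current
// preTransform. renderPassBeginInfo below is hypothetical.
//
//   vk::RenderPassTransformBeginInfoQCOM transformBegin( vk::SurfaceTransformFlagBitsKHR::eRotate90 );
//   renderPassBeginInfo.setPNext( &transformBegin );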
// wrapper struct for struct VkRenderingAreaInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderingAreaInfo.html
struct RenderingAreaInfo
{
using NativeType = VkRenderingAreaInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderingAreaInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR RenderingAreaInfo( uint32_t viewMask_ = {},
  uint32_t colorAttachmentCount_ = {},
  const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats_ = {},
  VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
  VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
  : pNext{ pNext_ }
  , viewMask{ viewMask_ }
  , colorAttachmentCount{ colorAttachmentCount_ }
  , pColorAttachmentFormats{ pColorAttachmentFormats_ }
  , depthAttachmentFormat{ depthAttachmentFormat_ }
  , stencilAttachmentFormat{ stencilAttachmentFormat_ }
{}
VULKAN_HPP_CONSTEXPR RenderingAreaInfo( RenderingAreaInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
RenderingAreaInfo( VkRenderingAreaInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: RenderingAreaInfo( *reinterpret_cast<RenderingAreaInfo const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
RenderingAreaInfo( uint32_t viewMask_,
  VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & colorAttachmentFormats_,
  VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
  VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
  const void * pNext_ = nullptr )
  : pNext( pNext_ )
  , viewMask( viewMask_ )
  , colorAttachmentCount( static_cast<uint32_t>( colorAttachmentFormats_.size() ) )
  , pColorAttachmentFormats( colorAttachmentFormats_.data() )
  , depthAttachmentFormat( depthAttachmentFormat_ )
  , stencilAttachmentFormat( stencilAttachmentFormat_ )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
RenderingAreaInfo & operator=( RenderingAreaInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
RenderingAreaInfo & operator=( VkRenderingAreaInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderingAreaInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 RenderingAreaInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderingAreaInfo & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT
{
viewMask = viewMask_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderingAreaInfo & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
{
colorAttachmentCount = colorAttachmentCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderingAreaInfo & setPColorAttachmentFormats( const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT
{
pColorAttachmentFormats = pColorAttachmentFormats_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
RenderingAreaInfo & setColorAttachmentFormats( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & colorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT
{
colorAttachmentCount = static_cast<uint32_t>( colorAttachmentFormats_.size() );
pColorAttachmentFormats = colorAttachmentFormats_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 RenderingAreaInfo & setDepthAttachmentFormat( VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT
{
depthAttachmentFormat = depthAttachmentFormat_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderingAreaInfo & setStencilAttachmentFormat( VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT
{
stencilAttachmentFormat = stencilAttachmentFormat_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkRenderingAreaInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRenderingAreaInfo*>( this );
}
operator VkRenderingAreaInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkRenderingAreaInfo*>( this );
}
operator VkRenderingAreaInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkRenderingAreaInfo*>( this );
}
operator VkRenderingAreaInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkRenderingAreaInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Format * const &, VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::Format const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, viewMask, colorAttachmentCount, pColorAttachmentFormats, depthAttachmentFormat, stencilAttachmentFormat );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( RenderingAreaInfo const & ) const = default;
#else
bool operator==( RenderingAreaInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( viewMask == rhs.viewMask )
&& ( colorAttachmentCount == rhs.colorAttachmentCount )
&& ( pColorAttachmentFormats == rhs.pColorAttachmentFormats )
&& ( depthAttachmentFormat == rhs.depthAttachmentFormat )
&& ( stencilAttachmentFormat == rhs.stencilAttachmentFormat );
#endif
}
bool operator!=( RenderingAreaInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderingAreaInfo;
const void * pNext = {};
uint32_t viewMask = {};
uint32_t colorAttachmentCount = {};
const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats = {};
VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
};
template <>
struct CppType<StructureType, StructureType::eRenderingAreaInfo>
{
using Type = RenderingAreaInfo;
};
using RenderingAreaInfoKHR = RenderingAreaInfo;
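// Illustrative usage sketch (editorial note, not generated from the registry):
// RenderingAreaInfo (VK_KHR_maintenance5, promoted to core) describes the attachment
// formats of a dynamic-rendering pass so the optimal render-area granularity can be
// queried. The device variable and formats below are hypothetical.
//
//   vk::Format colorFormats[] = { vk::Format::eB8G8R8A8Unorm };
//   vk::RenderingAreaInfo areaInfo( 0 /*viewMask*/, colorFormats, vk::Format::eD32Sfloat );
//   vk::Extent2D granularity = device.getRenderingAreaGranularity( areaInfo );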
// wrapper struct for struct VkRenderingAttachmentInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderingAttachmentInfo.html
struct RenderingAttachmentInfo
{
using NativeType = VkRenderingAttachmentInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderingAttachmentInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo( VULKAN_HPP_NAMESPACE::ImageView imageView_ = {},
  VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
  VULKAN_HPP_NAMESPACE::ResolveModeFlagBits resolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone,
  VULKAN_HPP_NAMESPACE::ImageView resolveImageView_ = {},
  VULKAN_HPP_NAMESPACE::ImageLayout resolveImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
  VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad,
  VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore,
  VULKAN_HPP_NAMESPACE::ClearValue clearValue_ = {},
  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
  : pNext{ pNext_ }
  , imageView{ imageView_ }
  , imageLayout{ imageLayout_ }
  , resolveMode{ resolveMode_ }
  , resolveImageView{ resolveImageView_ }
  , resolveImageLayout{ resolveImageLayout_ }
  , loadOp{ loadOp_ }
  , storeOp{ storeOp_ }
  , clearValue{ clearValue_ }
{}
VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo( RenderingAttachmentInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
RenderingAttachmentInfo( VkRenderingAttachmentInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: RenderingAttachmentInfo( *reinterpret_cast<RenderingAttachmentInfo const *>( &rhs ) )
{}
RenderingAttachmentInfo & operator=( RenderingAttachmentInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
RenderingAttachmentInfo & operator=( VkRenderingAttachmentInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
{
imageView = imageView_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ ) VULKAN_HPP_NOEXCEPT
{
imageLayout = imageLayout_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setResolveMode( VULKAN_HPP_NAMESPACE::ResolveModeFlagBits resolveMode_ ) VULKAN_HPP_NOEXCEPT
{
resolveMode = resolveMode_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setResolveImageView( VULKAN_HPP_NAMESPACE::ImageView resolveImageView_ ) VULKAN_HPP_NOEXCEPT
{
resolveImageView = resolveImageView_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setResolveImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout resolveImageLayout_ ) VULKAN_HPP_NOEXCEPT
{
resolveImageLayout = resolveImageLayout_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ ) VULKAN_HPP_NOEXCEPT
{
loadOp = loadOp_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ ) VULKAN_HPP_NOEXCEPT
{
storeOp = storeOp_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setClearValue( VULKAN_HPP_NAMESPACE::ClearValue const & clearValue_ ) VULKAN_HPP_NOEXCEPT
{
clearValue = clearValue_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkRenderingAttachmentInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRenderingAttachmentInfo*>( this );
}
operator VkRenderingAttachmentInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkRenderingAttachmentInfo*>( this );
}
operator VkRenderingAttachmentInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkRenderingAttachmentInfo*>( this );
}
operator VkRenderingAttachmentInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkRenderingAttachmentInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageView const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, VULKAN_HPP_NAMESPACE::ResolveModeFlagBits const &, VULKAN_HPP_NAMESPACE::ImageView const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, VULKAN_HPP_NAMESPACE::AttachmentLoadOp const &, VULKAN_HPP_NAMESPACE::AttachmentStoreOp const &, VULKAN_HPP_NAMESPACE::ClearValue const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, imageView, imageLayout, resolveMode, resolveImageView, resolveImageLayout, loadOp, storeOp, clearValue );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderingAttachmentInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ImageView imageView = {};
VULKAN_HPP_NAMESPACE::ImageLayout imageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
VULKAN_HPP_NAMESPACE::ResolveModeFlagBits resolveMode = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone;
VULKAN_HPP_NAMESPACE::ImageView resolveImageView = {};
VULKAN_HPP_NAMESPACE::ImageLayout resolveImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad;
VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore;
VULKAN_HPP_NAMESPACE::ClearValue clearValue = {};
};
template <>
struct CppType<StructureType, StructureType::eRenderingAttachmentInfo>
{
using Type = RenderingAttachmentInfo;
};
using RenderingAttachmentInfoKHR = RenderingAttachmentInfo;
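// Illustrative usage sketch (editorial note, not generated from the registry): a
// cleared color attachment for dynamic rendering (CommandBuffer::beginRendering).
// swapchainView below is a hypothetical vk::ImageView.
//
//   vk::RenderingAttachmentInfo colorAttachment{};
//   colorAttachment.setImageView( swapchainView )
//                  .setImageLayout( vk::ImageLayout::eColorAttachmentOptimal )
//                  .setLoadOp( vk::AttachmentLoadOp::eClear )
//                  .setStoreOp( vk::AttachmentStoreOp::eStore )
//                  .setClearValue( vk::ClearColorValue( std::array<float, 4>{ 0.f, 0.f, 0.f, 1.f } ) );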
// wrapper struct for struct VkRenderingAttachmentLocationInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderingAttachmentLocationInfo.html
struct RenderingAttachmentLocationInfo
{
using NativeType = VkRenderingAttachmentLocationInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderingAttachmentLocationInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR RenderingAttachmentLocationInfo(uint32_t colorAttachmentCount_ = {}, const uint32_t * pColorAttachmentLocations_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, colorAttachmentCount{ colorAttachmentCount_ }, pColorAttachmentLocations{ pColorAttachmentLocations_ }
{}
VULKAN_HPP_CONSTEXPR RenderingAttachmentLocationInfo( RenderingAttachmentLocationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
RenderingAttachmentLocationInfo( VkRenderingAttachmentLocationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: RenderingAttachmentLocationInfo( *reinterpret_cast<RenderingAttachmentLocationInfo const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
RenderingAttachmentLocationInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & colorAttachmentLocations_, const void * pNext_ = nullptr )
: pNext( pNext_ ), colorAttachmentCount( static_cast<uint32_t>( colorAttachmentLocations_.size() ) ), pColorAttachmentLocations( colorAttachmentLocations_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
RenderingAttachmentLocationInfo & operator=( RenderingAttachmentLocationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
RenderingAttachmentLocationInfo & operator=( VkRenderingAttachmentLocationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentLocationInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentLocationInfo & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
{
colorAttachmentCount = colorAttachmentCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentLocationInfo & setPColorAttachmentLocations( const uint32_t * pColorAttachmentLocations_ ) VULKAN_HPP_NOEXCEPT
{
pColorAttachmentLocations = pColorAttachmentLocations_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
RenderingAttachmentLocationInfo & setColorAttachmentLocations( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & colorAttachmentLocations_ ) VULKAN_HPP_NOEXCEPT
{
colorAttachmentCount = static_cast<uint32_t>( colorAttachmentLocations_.size() );
pColorAttachmentLocations = colorAttachmentLocations_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkRenderingAttachmentLocationInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRenderingAttachmentLocationInfo*>( this );
}
operator VkRenderingAttachmentLocationInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkRenderingAttachmentLocationInfo*>( this );
}
operator VkRenderingAttachmentLocationInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkRenderingAttachmentLocationInfo*>( this );
}
operator VkRenderingAttachmentLocationInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkRenderingAttachmentLocationInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const uint32_t * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, colorAttachmentCount, pColorAttachmentLocations );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( RenderingAttachmentLocationInfo const & ) const = default;
#else
bool operator==( RenderingAttachmentLocationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( colorAttachmentCount == rhs.colorAttachmentCount )
&& ( pColorAttachmentLocations == rhs.pColorAttachmentLocations );
#endif
}
bool operator!=( RenderingAttachmentLocationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderingAttachmentLocationInfo;
const void * pNext = {};
uint32_t colorAttachmentCount = {};
const uint32_t * pColorAttachmentLocations = {};
};
template <>
struct CppType<StructureType, StructureType::eRenderingAttachmentLocationInfo>
{
using Type = RenderingAttachmentLocationInfo;
};
using RenderingAttachmentLocationInfoKHR = RenderingAttachmentLocationInfo;
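// Illustrative usage sketch (editorial note, not generated from the registry): from
// VK_KHR_dynamic_rendering_local_read (promoted to core), this remaps the
// fragment-shader output locations of the current color attachments. commandBuffer
// below is hypothetical.
//
//   uint32_t locations[] = { 1, 0 };  // swap the locations of attachments 0 and 1
//   vk::RenderingAttachmentLocationInfo locationInfo( locations );
//   commandBuffer.setRenderingAttachmentLocations( locationInfo );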
// wrapper struct for struct VkRenderingEndInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderingEndInfoEXT.html
struct RenderingEndInfoEXT
{
using NativeType = VkRenderingEndInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderingEndInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR RenderingEndInfoEXT(const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }
{}
VULKAN_HPP_CONSTEXPR RenderingEndInfoEXT( RenderingEndInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
RenderingEndInfoEXT( VkRenderingEndInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: RenderingEndInfoEXT( *reinterpret_cast<RenderingEndInfoEXT const *>( &rhs ) )
{}
RenderingEndInfoEXT & operator=( RenderingEndInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
RenderingEndInfoEXT & operator=( VkRenderingEndInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderingEndInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 RenderingEndInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkRenderingEndInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRenderingEndInfoEXT*>( this );
}
operator VkRenderingEndInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkRenderingEndInfoEXT*>( this );
}
operator VkRenderingEndInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkRenderingEndInfoEXT*>( this );
}
operator VkRenderingEndInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkRenderingEndInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( RenderingEndInfoEXT const & ) const = default;
#else
bool operator==( RenderingEndInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext );
#endif
}
bool operator!=( RenderingEndInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderingEndInfoEXT;
const void * pNext = {};
};
template <>
struct CppType<StructureType, StructureType::eRenderingEndInfoEXT>
{
using Type = RenderingEndInfoEXT;
};
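// Usage sketch (editorial addition, not generated code): end a dynamic rendering pass
// via vkCmdEndRendering2EXT, whose pNext chain this struct carries. Assumes a
// hypothetical vk::CommandBuffer `cmd` and VK_EXT_fragment_density_map_offset enabled.
//
//   vk::RenderingEndInfoEXT endInfo{}; // chain e.g. a density-map offset struct via pNext
//   cmd.endRendering2EXT( endInfo );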
// wrapper struct for struct VkRenderingFragmentDensityMapAttachmentInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderingFragmentDensityMapAttachmentInfoEXT.html
struct RenderingFragmentDensityMapAttachmentInfoEXT
{
using NativeType = VkRenderingFragmentDensityMapAttachmentInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderingFragmentDensityMapAttachmentInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR RenderingFragmentDensityMapAttachmentInfoEXT(VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, imageView{ imageView_ }, imageLayout{ imageLayout_ }
{}
VULKAN_HPP_CONSTEXPR RenderingFragmentDensityMapAttachmentInfoEXT( RenderingFragmentDensityMapAttachmentInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
RenderingFragmentDensityMapAttachmentInfoEXT( VkRenderingFragmentDensityMapAttachmentInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: RenderingFragmentDensityMapAttachmentInfoEXT( *reinterpret_cast<RenderingFragmentDensityMapAttachmentInfoEXT const *>( &rhs ) )
{}
RenderingFragmentDensityMapAttachmentInfoEXT & operator=( RenderingFragmentDensityMapAttachmentInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
RenderingFragmentDensityMapAttachmentInfoEXT & operator=( VkRenderingFragmentDensityMapAttachmentInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderingFragmentDensityMapAttachmentInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 RenderingFragmentDensityMapAttachmentInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderingFragmentDensityMapAttachmentInfoEXT & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
{
imageView = imageView_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderingFragmentDensityMapAttachmentInfoEXT & setImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ ) VULKAN_HPP_NOEXCEPT
{
imageLayout = imageLayout_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkRenderingFragmentDensityMapAttachmentInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRenderingFragmentDensityMapAttachmentInfoEXT*>( this );
}
operator VkRenderingFragmentDensityMapAttachmentInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkRenderingFragmentDensityMapAttachmentInfoEXT*>( this );
}
operator VkRenderingFragmentDensityMapAttachmentInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkRenderingFragmentDensityMapAttachmentInfoEXT*>( this );
}
operator VkRenderingFragmentDensityMapAttachmentInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkRenderingFragmentDensityMapAttachmentInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageView const &, VULKAN_HPP_NAMESPACE::ImageLayout const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, imageView, imageLayout );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( RenderingFragmentDensityMapAttachmentInfoEXT const & ) const = default;
#else
bool operator==( RenderingFragmentDensityMapAttachmentInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( imageView == rhs.imageView )
&& ( imageLayout == rhs.imageLayout );
#endif
}
bool operator!=( RenderingFragmentDensityMapAttachmentInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderingFragmentDensityMapAttachmentInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ImageView imageView = {};
VULKAN_HPP_NAMESPACE::ImageLayout imageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
};
template <>
struct CppType<StructureType, StructureType::eRenderingFragmentDensityMapAttachmentInfoEXT>
{
using Type = RenderingFragmentDensityMapAttachmentInfoEXT;
};
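// Usage sketch (editorial addition, not generated code): attach a fragment density map
// to a dynamic rendering pass by chaining this struct into vk::RenderingInfo::pNext.
// `densityView` is a hypothetical vk::ImageView created with
// vk::ImageUsageFlagBits::eFragmentDensityMapEXT.
//
//   vk::RenderingFragmentDensityMapAttachmentInfoEXT densityMap{
//     densityView, vk::ImageLayout::eFragmentDensityMapOptimalEXT };
//   vk::RenderingInfo renderingInfo{};
//   renderingInfo.pNext = &densityMap;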
// wrapper struct for struct VkRenderingFragmentShadingRateAttachmentInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderingFragmentShadingRateAttachmentInfoKHR.html
struct RenderingFragmentShadingRateAttachmentInfoKHR
{
using NativeType = VkRenderingFragmentShadingRateAttachmentInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderingFragmentShadingRateAttachmentInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR RenderingFragmentShadingRateAttachmentInfoKHR(VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::Extent2D shadingRateAttachmentTexelSize_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, imageView{ imageView_ }, imageLayout{ imageLayout_ }, shadingRateAttachmentTexelSize{ shadingRateAttachmentTexelSize_ }
{}
VULKAN_HPP_CONSTEXPR RenderingFragmentShadingRateAttachmentInfoKHR( RenderingFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
RenderingFragmentShadingRateAttachmentInfoKHR( VkRenderingFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: RenderingFragmentShadingRateAttachmentInfoKHR( *reinterpret_cast<RenderingFragmentShadingRateAttachmentInfoKHR const *>( &rhs ) )
{}
RenderingFragmentShadingRateAttachmentInfoKHR & operator=( RenderingFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
RenderingFragmentShadingRateAttachmentInfoKHR & operator=( VkRenderingFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderingFragmentShadingRateAttachmentInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 RenderingFragmentShadingRateAttachmentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderingFragmentShadingRateAttachmentInfoKHR & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
{
imageView = imageView_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderingFragmentShadingRateAttachmentInfoKHR & setImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ ) VULKAN_HPP_NOEXCEPT
{
imageLayout = imageLayout_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderingFragmentShadingRateAttachmentInfoKHR & setShadingRateAttachmentTexelSize( VULKAN_HPP_NAMESPACE::Extent2D const & shadingRateAttachmentTexelSize_ ) VULKAN_HPP_NOEXCEPT
{
shadingRateAttachmentTexelSize = shadingRateAttachmentTexelSize_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkRenderingFragmentShadingRateAttachmentInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRenderingFragmentShadingRateAttachmentInfoKHR*>( this );
}
operator VkRenderingFragmentShadingRateAttachmentInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkRenderingFragmentShadingRateAttachmentInfoKHR*>( this );
}
operator VkRenderingFragmentShadingRateAttachmentInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkRenderingFragmentShadingRateAttachmentInfoKHR*>( this );
}
operator VkRenderingFragmentShadingRateAttachmentInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkRenderingFragmentShadingRateAttachmentInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageView const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, VULKAN_HPP_NAMESPACE::Extent2D const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, imageView, imageLayout, shadingRateAttachmentTexelSize );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( RenderingFragmentShadingRateAttachmentInfoKHR const & ) const = default;
#else
bool operator==( RenderingFragmentShadingRateAttachmentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( imageView == rhs.imageView )
&& ( imageLayout == rhs.imageLayout )
&& ( shadingRateAttachmentTexelSize == rhs.shadingRateAttachmentTexelSize );
#endif
}
bool operator!=( RenderingFragmentShadingRateAttachmentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderingFragmentShadingRateAttachmentInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ImageView imageView = {};
VULKAN_HPP_NAMESPACE::ImageLayout imageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
VULKAN_HPP_NAMESPACE::Extent2D shadingRateAttachmentTexelSize = {};
};
template <>
struct CppType<StructureType, StructureType::eRenderingFragmentShadingRateAttachmentInfoKHR>
{
using Type = RenderingFragmentShadingRateAttachmentInfoKHR;
};
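// Usage sketch (editorial addition, not generated code): chain a fragment shading rate
// attachment into a dynamic rendering pass. `rateView` is a hypothetical vk::ImageView;
// the 16x16 texel size must match a size reported by the implementation.
//
//   vk::RenderingFragmentShadingRateAttachmentInfoKHR shadingRate{
//     rateView, vk::ImageLayout::eFragmentShadingRateAttachmentOptimalKHR, vk::Extent2D{ 16, 16 } };
//   vk::RenderingInfo renderingInfo{};
//   renderingInfo.pNext = &shadingRate;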
// wrapper struct for struct VkRenderingInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderingInfo.html
struct RenderingInfo
{
using NativeType = VkRenderingInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderingInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 RenderingInfo(VULKAN_HPP_NAMESPACE::RenderingFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {}, uint32_t layerCount_ = {}, uint32_t viewMask_ = {}, uint32_t colorAttachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pColorAttachments_ = {}, const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pDepthAttachment_ = {}, const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pStencilAttachment_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, renderArea{ renderArea_ }, layerCount{ layerCount_ }, viewMask{ viewMask_ }, colorAttachmentCount{ colorAttachmentCount_ }, pColorAttachments{ pColorAttachments_ }, pDepthAttachment{ pDepthAttachment_ }, pStencilAttachment{ pStencilAttachment_ }
{}
VULKAN_HPP_CONSTEXPR_14 RenderingInfo( RenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
RenderingInfo( VkRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: RenderingInfo( *reinterpret_cast<RenderingInfo const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
RenderingInfo( VULKAN_HPP_NAMESPACE::RenderingFlags flags_, VULKAN_HPP_NAMESPACE::Rect2D renderArea_, uint32_t layerCount_, uint32_t viewMask_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo> const & colorAttachments_, const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pDepthAttachment_ = {}, const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pStencilAttachment_ = {}, const void * pNext_ = nullptr )
: pNext( pNext_ ), flags( flags_ ), renderArea( renderArea_ ), layerCount( layerCount_ ), viewMask( viewMask_ ), colorAttachmentCount( static_cast<uint32_t>( colorAttachments_.size() ) ), pColorAttachments( colorAttachments_.data() ), pDepthAttachment( pDepthAttachment_ ), pStencilAttachment( pStencilAttachment_ )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
RenderingInfo & operator=( RenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
RenderingInfo & operator=( VkRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderingInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setFlags( VULKAN_HPP_NAMESPACE::RenderingFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setRenderArea( VULKAN_HPP_NAMESPACE::Rect2D const & renderArea_ ) VULKAN_HPP_NOEXCEPT
{
renderArea = renderArea_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
{
layerCount = layerCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT
{
viewMask = viewMask_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
{
colorAttachmentCount = colorAttachmentCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setPColorAttachments( const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pColorAttachments_ ) VULKAN_HPP_NOEXCEPT
{
pColorAttachments = pColorAttachments_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
RenderingInfo & setColorAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo> const & colorAttachments_ ) VULKAN_HPP_NOEXCEPT
{
colorAttachmentCount = static_cast<uint32_t>( colorAttachments_.size() );
pColorAttachments = colorAttachments_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setPDepthAttachment( const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pDepthAttachment_ ) VULKAN_HPP_NOEXCEPT
{
pDepthAttachment = pDepthAttachment_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setPStencilAttachment( const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pStencilAttachment_ ) VULKAN_HPP_NOEXCEPT
{
pStencilAttachment = pStencilAttachment_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkRenderingInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRenderingInfo*>( this );
}
operator VkRenderingInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkRenderingInfo*>( this );
}
operator VkRenderingInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkRenderingInfo*>( this );
}
operator VkRenderingInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkRenderingInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::RenderingFlags const &, VULKAN_HPP_NAMESPACE::Rect2D const &, uint32_t const &, uint32_t const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * const &, const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * const &, const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, renderArea, layerCount, viewMask, colorAttachmentCount, pColorAttachments, pDepthAttachment, pStencilAttachment );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( RenderingInfo const & ) const = default;
#else
bool operator==( RenderingInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( renderArea == rhs.renderArea )
&& ( layerCount == rhs.layerCount )
&& ( viewMask == rhs.viewMask )
&& ( colorAttachmentCount == rhs.colorAttachmentCount )
&& ( pColorAttachments == rhs.pColorAttachments )
&& ( pDepthAttachment == rhs.pDepthAttachment )
&& ( pStencilAttachment == rhs.pStencilAttachment );
#endif
}
bool operator!=( RenderingInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderingInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::RenderingFlags flags = {};
VULKAN_HPP_NAMESPACE::Rect2D renderArea = {};
uint32_t layerCount = {};
uint32_t viewMask = {};
uint32_t colorAttachmentCount = {};
const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pColorAttachments = {};
const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pDepthAttachment = {};
const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pStencilAttachment = {};
};
template <>
struct CppType<StructureType, StructureType::eRenderingInfo>
{
using Type = RenderingInfo;
};
using RenderingInfoKHR = RenderingInfo;
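// Usage sketch (editorial addition, not generated code): begin a dynamic rendering pass
// with one cleared color attachment, using the enhanced-mode ArrayProxy constructor.
// `cmd`, `colorView`, `width`, and `height` are hypothetical.
//
//   vk::RenderingAttachmentInfo color{};
//   color.imageView   = colorView;
//   color.imageLayout = vk::ImageLayout::eColorAttachmentOptimal;
//   color.loadOp      = vk::AttachmentLoadOp::eClear;
//   color.storeOp     = vk::AttachmentStoreOp::eStore;
//   vk::RenderingInfo renderingInfo{ {}, vk::Rect2D{ { 0, 0 }, { width, height } },
//     1 /*layerCount*/, 0 /*viewMask*/, color };
//   cmd.beginRendering( renderingInfo );
//   // ... draw calls ...
//   cmd.endRendering();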
// wrapper struct for struct VkRenderingInputAttachmentIndexInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderingInputAttachmentIndexInfo.html
struct RenderingInputAttachmentIndexInfo
{
using NativeType = VkRenderingInputAttachmentIndexInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderingInputAttachmentIndexInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR RenderingInputAttachmentIndexInfo(uint32_t colorAttachmentCount_ = {}, const uint32_t * pColorAttachmentInputIndices_ = {}, const uint32_t * pDepthInputAttachmentIndex_ = {}, const uint32_t * pStencilInputAttachmentIndex_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, colorAttachmentCount{ colorAttachmentCount_ }, pColorAttachmentInputIndices{ pColorAttachmentInputIndices_ }, pDepthInputAttachmentIndex{ pDepthInputAttachmentIndex_ }, pStencilInputAttachmentIndex{ pStencilInputAttachmentIndex_ }
{}
VULKAN_HPP_CONSTEXPR RenderingInputAttachmentIndexInfo( RenderingInputAttachmentIndexInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
RenderingInputAttachmentIndexInfo( VkRenderingInputAttachmentIndexInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: RenderingInputAttachmentIndexInfo( *reinterpret_cast<RenderingInputAttachmentIndexInfo const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
RenderingInputAttachmentIndexInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & colorAttachmentInputIndices_, const uint32_t * pDepthInputAttachmentIndex_ = {}, const uint32_t * pStencilInputAttachmentIndex_ = {}, const void * pNext_ = nullptr )
: pNext( pNext_ ), colorAttachmentCount( static_cast<uint32_t>( colorAttachmentInputIndices_.size() ) ), pColorAttachmentInputIndices( colorAttachmentInputIndices_.data() ), pDepthInputAttachmentIndex( pDepthInputAttachmentIndex_ ), pStencilInputAttachmentIndex( pStencilInputAttachmentIndex_ )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
RenderingInputAttachmentIndexInfo & operator=( RenderingInputAttachmentIndexInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
RenderingInputAttachmentIndexInfo & operator=( VkRenderingInputAttachmentIndexInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 RenderingInputAttachmentIndexInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderingInputAttachmentIndexInfo & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
{
colorAttachmentCount = colorAttachmentCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderingInputAttachmentIndexInfo & setPColorAttachmentInputIndices( const uint32_t * pColorAttachmentInputIndices_ ) VULKAN_HPP_NOEXCEPT
{
pColorAttachmentInputIndices = pColorAttachmentInputIndices_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
RenderingInputAttachmentIndexInfo & setColorAttachmentInputIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & colorAttachmentInputIndices_ ) VULKAN_HPP_NOEXCEPT
{
colorAttachmentCount = static_cast<uint32_t>( colorAttachmentInputIndices_.size() );
pColorAttachmentInputIndices = colorAttachmentInputIndices_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 RenderingInputAttachmentIndexInfo & setPDepthInputAttachmentIndex( const uint32_t * pDepthInputAttachmentIndex_ ) VULKAN_HPP_NOEXCEPT
{
pDepthInputAttachmentIndex = pDepthInputAttachmentIndex_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RenderingInputAttachmentIndexInfo & setPStencilInputAttachmentIndex( const uint32_t * pStencilInputAttachmentIndex_ ) VULKAN_HPP_NOEXCEPT
{
pStencilInputAttachmentIndex = pStencilInputAttachmentIndex_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkRenderingInputAttachmentIndexInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRenderingInputAttachmentIndexInfo*>( this );
}
operator VkRenderingInputAttachmentIndexInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkRenderingInputAttachmentIndexInfo*>( this );
}
operator VkRenderingInputAttachmentIndexInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkRenderingInputAttachmentIndexInfo*>( this );
}
operator VkRenderingInputAttachmentIndexInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkRenderingInputAttachmentIndexInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const uint32_t * const &, const uint32_t * const &, const uint32_t * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, colorAttachmentCount, pColorAttachmentInputIndices, pDepthInputAttachmentIndex, pStencilInputAttachmentIndex );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( RenderingInputAttachmentIndexInfo const & ) const = default;
#else
bool operator==( RenderingInputAttachmentIndexInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( colorAttachmentCount == rhs.colorAttachmentCount )
&& ( pColorAttachmentInputIndices == rhs.pColorAttachmentInputIndices )
&& ( pDepthInputAttachmentIndex == rhs.pDepthInputAttachmentIndex )
&& ( pStencilInputAttachmentIndex == rhs.pStencilInputAttachmentIndex );
#endif
}
bool operator!=( RenderingInputAttachmentIndexInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderingInputAttachmentIndexInfo;
const void * pNext = {};
uint32_t colorAttachmentCount = {};
const uint32_t * pColorAttachmentInputIndices = {};
const uint32_t * pDepthInputAttachmentIndex = {};
const uint32_t * pStencilInputAttachmentIndex = {};
};
template <>
struct CppType<StructureType, StructureType::eRenderingInputAttachmentIndexInfo>
{
using Type = RenderingInputAttachmentIndexInfo;
};
using RenderingInputAttachmentIndexInfoKHR = RenderingInputAttachmentIndexInfo;
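// Usage sketch (editorial addition, not generated code): tell subsequent draws which
// input attachment index maps to which color attachment when reading attachments inside
// a dynamic rendering pass (Vulkan 1.4 / VK_KHR_dynamic_rendering_local_read). `cmd` is
// a hypothetical vk::CommandBuffer.
//
//   uint32_t const inputIndices[] = { 0, VK_ATTACHMENT_UNUSED }; // attachment 1 is not read
//   vk::RenderingInputAttachmentIndexInfo indexInfo{ inputIndices };
//   cmd.setRenderingInputAttachmentIndices( indexInfo );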
// wrapper struct for struct VkResolveImageInfo2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkResolveImageInfo2.html
struct ResolveImageInfo2
{
using NativeType = VkResolveImageInfo2;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eResolveImageInfo2;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ResolveImageInfo2(VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::ImageResolve2 * pRegions_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, srcImage{ srcImage_ }, srcImageLayout{ srcImageLayout_ }, dstImage{ dstImage_ }, dstImageLayout{ dstImageLayout_ }, regionCount{ regionCount_ }, pRegions{ pRegions_ }
{}
VULKAN_HPP_CONSTEXPR ResolveImageInfo2( ResolveImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ResolveImageInfo2( VkResolveImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
: ResolveImageInfo2( *reinterpret_cast<ResolveImageInfo2 const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
ResolveImageInfo2( VULKAN_HPP_NAMESPACE::Image srcImage_, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, VULKAN_HPP_NAMESPACE::Image dstImage_, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageResolve2> const & regions_, const void * pNext_ = nullptr )
: pNext( pNext_ ), srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
ResolveImageInfo2 & operator=( ResolveImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ResolveImageInfo2 & operator=( VkResolveImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ResolveImageInfo2 const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT
{
srcImage = srcImage_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT
{
srcImageLayout = srcImageLayout_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT
{
dstImage = dstImage_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT
{
dstImageLayout = dstImageLayout_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
{
regionCount = regionCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setPRegions( const VULKAN_HPP_NAMESPACE::ImageResolve2 * pRegions_ ) VULKAN_HPP_NOEXCEPT
{
pRegions = pRegions_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
ResolveImageInfo2 & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageResolve2> const & regions_ ) VULKAN_HPP_NOEXCEPT
{
regionCount = static_cast<uint32_t>( regions_.size() );
pRegions = regions_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkResolveImageInfo2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkResolveImageInfo2*>( this );
}
operator VkResolveImageInfo2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkResolveImageInfo2*>( this );
}
operator VkResolveImageInfo2 const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkResolveImageInfo2*>( this );
}
operator VkResolveImageInfo2 *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkResolveImageInfo2*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Image const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, VULKAN_HPP_NAMESPACE::Image const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::ImageResolve2 * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ResolveImageInfo2 const & ) const = default;
#else
bool operator==( ResolveImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( srcImage == rhs.srcImage )
&& ( srcImageLayout == rhs.srcImageLayout )
&& ( dstImage == rhs.dstImage )
&& ( dstImageLayout == rhs.dstImageLayout )
&& ( regionCount == rhs.regionCount )
&& ( pRegions == rhs.pRegions );
#endif
}
bool operator!=( ResolveImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eResolveImageInfo2;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Image srcImage = {};
VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
VULKAN_HPP_NAMESPACE::Image dstImage = {};
VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
uint32_t regionCount = {};
const VULKAN_HPP_NAMESPACE::ImageResolve2 * pRegions = {};
};
template <>
struct CppType<StructureType, StructureType::eResolveImageInfo2>
{
using Type = ResolveImageInfo2;
};
using ResolveImageInfo2KHR = ResolveImageInfo2;
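// Usage sketch (editorial addition, not generated code): resolve a multisampled color
// image into a single-sample image with vk::CommandBuffer::resolveImage2. `cmd`,
// `msaaImage`, `resolvedImage`, `width`, and `height` are hypothetical; both images
// must already be in the given transfer layouts.
//
//   vk::ImageResolve2 region{};
//   region.srcSubresource = { vk::ImageAspectFlagBits::eColor, 0, 0, 1 };
//   region.dstSubresource = region.srcSubresource;
//   region.extent         = vk::Extent3D{ width, height, 1 };
//   vk::ResolveImageInfo2 resolveInfo{ msaaImage, vk::ImageLayout::eTransferSrcOptimal,
//     resolvedImage, vk::ImageLayout::eTransferDstOptimal, region };
//   cmd.resolveImage2( resolveInfo );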
// wrapper struct for struct VkSamplerBlockMatchWindowCreateInfoQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSamplerBlockMatchWindowCreateInfoQCOM.html
struct SamplerBlockMatchWindowCreateInfoQCOM
{
using NativeType = VkSamplerBlockMatchWindowCreateInfoQCOM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerBlockMatchWindowCreateInfoQCOM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SamplerBlockMatchWindowCreateInfoQCOM(VULKAN_HPP_NAMESPACE::Extent2D windowExtent_ = {}, VULKAN_HPP_NAMESPACE::BlockMatchWindowCompareModeQCOM windowCompareMode_ = VULKAN_HPP_NAMESPACE::BlockMatchWindowCompareModeQCOM::eMin, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, windowExtent{ windowExtent_ }, windowCompareMode{ windowCompareMode_ }
{}
VULKAN_HPP_CONSTEXPR SamplerBlockMatchWindowCreateInfoQCOM( SamplerBlockMatchWindowCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SamplerBlockMatchWindowCreateInfoQCOM( VkSamplerBlockMatchWindowCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
: SamplerBlockMatchWindowCreateInfoQCOM( *reinterpret_cast<SamplerBlockMatchWindowCreateInfoQCOM const *>( &rhs ) )
{}
SamplerBlockMatchWindowCreateInfoQCOM & operator=( SamplerBlockMatchWindowCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SamplerBlockMatchWindowCreateInfoQCOM & operator=( VkSamplerBlockMatchWindowCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerBlockMatchWindowCreateInfoQCOM const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 SamplerBlockMatchWindowCreateInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SamplerBlockMatchWindowCreateInfoQCOM & setWindowExtent( VULKAN_HPP_NAMESPACE::Extent2D const & windowExtent_ ) VULKAN_HPP_NOEXCEPT
{
windowExtent = windowExtent_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SamplerBlockMatchWindowCreateInfoQCOM & setWindowCompareMode( VULKAN_HPP_NAMESPACE::BlockMatchWindowCompareModeQCOM windowCompareMode_ ) VULKAN_HPP_NOEXCEPT
{
windowCompareMode = windowCompareMode_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkSamplerBlockMatchWindowCreateInfoQCOM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSamplerBlockMatchWindowCreateInfoQCOM*>( this );
}
operator VkSamplerBlockMatchWindowCreateInfoQCOM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSamplerBlockMatchWindowCreateInfoQCOM*>( this );
}
operator VkSamplerBlockMatchWindowCreateInfoQCOM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSamplerBlockMatchWindowCreateInfoQCOM*>( this );
}
operator VkSamplerBlockMatchWindowCreateInfoQCOM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSamplerBlockMatchWindowCreateInfoQCOM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::BlockMatchWindowCompareModeQCOM const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, windowExtent, windowCompareMode );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SamplerBlockMatchWindowCreateInfoQCOM const & ) const = default;
#else
bool operator==( SamplerBlockMatchWindowCreateInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( windowExtent == rhs.windowExtent )
&& ( windowCompareMode == rhs.windowCompareMode );
#endif
}
bool operator!=( SamplerBlockMatchWindowCreateInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerBlockMatchWindowCreateInfoQCOM;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Extent2D windowExtent = {};
VULKAN_HPP_NAMESPACE::BlockMatchWindowCompareModeQCOM windowCompareMode = VULKAN_HPP_NAMESPACE::BlockMatchWindowCompareModeQCOM::eMin;
};
template <>
struct CppType<StructureType, StructureType::eSamplerBlockMatchWindowCreateInfoQCOM>
{
using Type = SamplerBlockMatchWindowCreateInfoQCOM;
};
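// Usage sketch (editorial addition, not generated code): configure a 5x5 block match
// window on a sampler by chaining this struct into vk::SamplerCreateInfo::pNext.
// Requires VK_QCOM_image_processing2; `samplerCreateInfo` is hypothetical.
//
//   vk::SamplerBlockMatchWindowCreateInfoQCOM window{
//     vk::Extent2D{ 5, 5 }, vk::BlockMatchWindowCompareModeQCOM::eMin };
//   samplerCreateInfo.pNext = &window;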
// wrapper struct for struct VkSamplerBorderColorComponentMappingCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSamplerBorderColorComponentMappingCreateInfoEXT.html
struct SamplerBorderColorComponentMappingCreateInfoEXT
{
using NativeType = VkSamplerBorderColorComponentMappingCreateInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerBorderColorComponentMappingCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SamplerBorderColorComponentMappingCreateInfoEXT(VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {}, VULKAN_HPP_NAMESPACE::Bool32 srgb_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, components{ components_ }, srgb{ srgb_ }
{}
VULKAN_HPP_CONSTEXPR SamplerBorderColorComponentMappingCreateInfoEXT( SamplerBorderColorComponentMappingCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SamplerBorderColorComponentMappingCreateInfoEXT( VkSamplerBorderColorComponentMappingCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: SamplerBorderColorComponentMappingCreateInfoEXT( *reinterpret_cast<SamplerBorderColorComponentMappingCreateInfoEXT const *>( &rhs ) )
{}
SamplerBorderColorComponentMappingCreateInfoEXT & operator=( SamplerBorderColorComponentMappingCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SamplerBorderColorComponentMappingCreateInfoEXT & operator=( VkSamplerBorderColorComponentMappingCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerBorderColorComponentMappingCreateInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 SamplerBorderColorComponentMappingCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SamplerBorderColorComponentMappingCreateInfoEXT & setComponents( VULKAN_HPP_NAMESPACE::ComponentMapping const & components_ ) VULKAN_HPP_NOEXCEPT
{
components = components_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SamplerBorderColorComponentMappingCreateInfoEXT & setSrgb( VULKAN_HPP_NAMESPACE::Bool32 srgb_ ) VULKAN_HPP_NOEXCEPT
{
srgb = srgb_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkSamplerBorderColorComponentMappingCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSamplerBorderColorComponentMappingCreateInfoEXT*>( this );
}
operator VkSamplerBorderColorComponentMappingCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSamplerBorderColorComponentMappingCreateInfoEXT*>( this );
}
operator VkSamplerBorderColorComponentMappingCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSamplerBorderColorComponentMappingCreateInfoEXT*>( this );
}
operator VkSamplerBorderColorComponentMappingCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSamplerBorderColorComponentMappingCreateInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ComponentMapping const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, components, srgb );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SamplerBorderColorComponentMappingCreateInfoEXT const & ) const = default;
#else
bool operator==( SamplerBorderColorComponentMappingCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( components == rhs.components )
&& ( srgb == rhs.srgb );
#endif
}
bool operator!=( SamplerBorderColorComponentMappingCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerBorderColorComponentMappingCreateInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ComponentMapping components = {};
VULKAN_HPP_NAMESPACE::Bool32 srgb = {};
};
template <>
struct CppType<StructureType, StructureType::eSamplerBorderColorComponentMappingCreateInfoEXT>
{
using Type = SamplerBorderColorComponentMappingCreateInfoEXT;
};
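// Usage sketch (editorial addition, not generated code): swizzle the components of a
// custom border color, e.g. for a BGRA-formatted image, by chaining this struct into
// vk::SamplerCreateInfo::pNext. Requires VK_EXT_border_color_swizzle;
// `samplerCreateInfo` is hypothetical.
//
//   vk::SamplerBorderColorComponentMappingCreateInfoEXT mapping{
//     vk::ComponentMapping{ vk::ComponentSwizzle::eB, vk::ComponentSwizzle::eG,
//                           vk::ComponentSwizzle::eR, vk::ComponentSwizzle::eA },
//     VK_TRUE /*srgb*/ };
//   samplerCreateInfo.pNext = &mapping;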
// wrapper struct for struct VkSamplerCaptureDescriptorDataInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSamplerCaptureDescriptorDataInfoEXT.html
struct SamplerCaptureDescriptorDataInfoEXT
{
using NativeType = VkSamplerCaptureDescriptorDataInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerCaptureDescriptorDataInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SamplerCaptureDescriptorDataInfoEXT(VULKAN_HPP_NAMESPACE::Sampler sampler_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, sampler{ sampler_ }
{}
VULKAN_HPP_CONSTEXPR SamplerCaptureDescriptorDataInfoEXT( SamplerCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SamplerCaptureDescriptorDataInfoEXT( VkSamplerCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: SamplerCaptureDescriptorDataInfoEXT( *reinterpret_cast<SamplerCaptureDescriptorDataInfoEXT const *>( &rhs ) )
{}
SamplerCaptureDescriptorDataInfoEXT & operator=( SamplerCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SamplerCaptureDescriptorDataInfoEXT & operator=( VkSamplerCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 SamplerCaptureDescriptorDataInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SamplerCaptureDescriptorDataInfoEXT & setSampler( VULKAN_HPP_NAMESPACE::Sampler sampler_ ) VULKAN_HPP_NOEXCEPT
{
sampler = sampler_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkSamplerCaptureDescriptorDataInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSamplerCaptureDescriptorDataInfoEXT*>( this );
}
operator VkSamplerCaptureDescriptorDataInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSamplerCaptureDescriptorDataInfoEXT*>( this );
}
operator VkSamplerCaptureDescriptorDataInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSamplerCaptureDescriptorDataInfoEXT*>( this );
}
operator VkSamplerCaptureDescriptorDataInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSamplerCaptureDescriptorDataInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Sampler const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, sampler );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SamplerCaptureDescriptorDataInfoEXT const & ) const = default;
#else
bool operator==( SamplerCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( sampler == rhs.sampler );
#endif
}
bool operator!=( SamplerCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerCaptureDescriptorDataInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Sampler sampler = {};
};
template <>
struct CppType<StructureType, StructureType::eSamplerCaptureDescriptorDataInfoEXT>
{
using Type = SamplerCaptureDescriptorDataInfoEXT;
};
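// Usage sketch (editorial addition, not generated code): retrieve opaque capture data
// for a sampler created with the capture-replay flag, for use with
// VK_EXT_descriptor_buffer. `device`, `sampler`, and `dataSize` (queried from the
// physical-device descriptor-buffer properties) are hypothetical, and the exact
// enhanced-mode signature may differ from this pointer-based sketch.
//
//   vk::SamplerCaptureDescriptorDataInfoEXT captureInfo{ sampler };
//   std::vector<std::byte> data( dataSize );
//   (void)device.getSamplerOpaqueCaptureDescriptorDataEXT( &captureInfo, data.data() );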
// wrapper struct for struct VkSamplerCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSamplerCreateInfo.html
struct SamplerCreateInfo
{
using NativeType = VkSamplerCreateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerCreateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SamplerCreateInfo(VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Filter magFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest, VULKAN_HPP_NAMESPACE::Filter minFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest, VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode_ = VULKAN_HPP_NAMESPACE::SamplerMipmapMode::eNearest, VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat, VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat, VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat, float mipLodBias_ = {}, VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable_ = {}, float maxAnisotropy_ = {}, VULKAN_HPP_NAMESPACE::Bool32 compareEnable_ = {}, VULKAN_HPP_NAMESPACE::CompareOp compareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever, float minLod_ = {}, float maxLod_ = {}, VULKAN_HPP_NAMESPACE::BorderColor borderColor_ = VULKAN_HPP_NAMESPACE::BorderColor::eFloatTransparentBlack, VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, magFilter{ magFilter_ }, minFilter{ minFilter_ }, mipmapMode{ mipmapMode_ }, addressModeU{ addressModeU_ }, addressModeV{ addressModeV_ }, addressModeW{ addressModeW_ }, mipLodBias{ mipLodBias_ }, anisotropyEnable{ anisotropyEnable_ }, maxAnisotropy{ maxAnisotropy_ }, compareEnable{ compareEnable_ }, compareOp{ compareOp_ }, minLod{ minLod_ }, maxLod{ maxLod_ }, borderColor{ borderColor_ }, unnormalizedCoordinates{ unnormalizedCoordinates_ }
{}
VULKAN_HPP_CONSTEXPR SamplerCreateInfo( SamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SamplerCreateInfo( VkSamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: SamplerCreateInfo( *reinterpret_cast<SamplerCreateInfo const *>( &rhs ) )
{}
SamplerCreateInfo & operator=( SamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SamplerCreateInfo & operator=( VkSamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMagFilter( VULKAN_HPP_NAMESPACE::Filter magFilter_ ) VULKAN_HPP_NOEXCEPT
{
magFilter = magFilter_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMinFilter( VULKAN_HPP_NAMESPACE::Filter minFilter_ ) VULKAN_HPP_NOEXCEPT
{
minFilter = minFilter_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMipmapMode( VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode_ ) VULKAN_HPP_NOEXCEPT
{
mipmapMode = mipmapMode_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setAddressModeU( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU_ ) VULKAN_HPP_NOEXCEPT
{
addressModeU = addressModeU_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setAddressModeV( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV_ ) VULKAN_HPP_NOEXCEPT
{
addressModeV = addressModeV_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setAddressModeW( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW_ ) VULKAN_HPP_NOEXCEPT
{
addressModeW = addressModeW_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMipLodBias( float mipLodBias_ ) VULKAN_HPP_NOEXCEPT
{
mipLodBias = mipLodBias_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setAnisotropyEnable( VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable_ ) VULKAN_HPP_NOEXCEPT
{
anisotropyEnable = anisotropyEnable_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMaxAnisotropy( float maxAnisotropy_ ) VULKAN_HPP_NOEXCEPT
{
maxAnisotropy = maxAnisotropy_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setCompareEnable( VULKAN_HPP_NAMESPACE::Bool32 compareEnable_ ) VULKAN_HPP_NOEXCEPT
{
compareEnable = compareEnable_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setCompareOp( VULKAN_HPP_NAMESPACE::CompareOp compareOp_ ) VULKAN_HPP_NOEXCEPT
{
compareOp = compareOp_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMinLod( float minLod_ ) VULKAN_HPP_NOEXCEPT
{
minLod = minLod_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMaxLod( float maxLod_ ) VULKAN_HPP_NOEXCEPT
{
maxLod = maxLod_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setBorderColor( VULKAN_HPP_NAMESPACE::BorderColor borderColor_ ) VULKAN_HPP_NOEXCEPT
{
borderColor = borderColor_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setUnnormalizedCoordinates( VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates_ ) VULKAN_HPP_NOEXCEPT
{
unnormalizedCoordinates = unnormalizedCoordinates_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkSamplerCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSamplerCreateInfo*>( this );
}
operator VkSamplerCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSamplerCreateInfo*>( this );
}
operator VkSamplerCreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSamplerCreateInfo*>( this );
}
operator VkSamplerCreateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSamplerCreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SamplerCreateFlags const &, VULKAN_HPP_NAMESPACE::Filter const &, VULKAN_HPP_NAMESPACE::Filter const &, VULKAN_HPP_NAMESPACE::SamplerMipmapMode const &, VULKAN_HPP_NAMESPACE::SamplerAddressMode const &, VULKAN_HPP_NAMESPACE::SamplerAddressMode const &, VULKAN_HPP_NAMESPACE::SamplerAddressMode const &, float const &, VULKAN_HPP_NAMESPACE::Bool32 const &, float const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::CompareOp const &, float const &, float const &, VULKAN_HPP_NAMESPACE::BorderColor const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, magFilter, minFilter, mipmapMode, addressModeU, addressModeV, addressModeW, mipLodBias, anisotropyEnable, maxAnisotropy, compareEnable, compareOp, minLod, maxLod, borderColor, unnormalizedCoordinates );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SamplerCreateInfo const & ) const = default;
#else
bool operator==( SamplerCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( magFilter == rhs.magFilter )
&& ( minFilter == rhs.minFilter )
&& ( mipmapMode == rhs.mipmapMode )
&& ( addressModeU == rhs.addressModeU )
&& ( addressModeV == rhs.addressModeV )
&& ( addressModeW == rhs.addressModeW )
&& ( mipLodBias == rhs.mipLodBias )
&& ( anisotropyEnable == rhs.anisotropyEnable )
&& ( maxAnisotropy == rhs.maxAnisotropy )
&& ( compareEnable == rhs.compareEnable )
&& ( compareOp == rhs.compareOp )
&& ( minLod == rhs.minLod )
&& ( maxLod == rhs.maxLod )
&& ( borderColor == rhs.borderColor )
&& ( unnormalizedCoordinates == rhs.unnormalizedCoordinates );
#endif
}
bool operator!=( SamplerCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerCreateInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags = {};
VULKAN_HPP_NAMESPACE::Filter magFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest;
VULKAN_HPP_NAMESPACE::Filter minFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest;
VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode = VULKAN_HPP_NAMESPACE::SamplerMipmapMode::eNearest;
VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat;
VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat;
VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat;
float mipLodBias = {};
VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable = {};
float maxAnisotropy = {};
VULKAN_HPP_NAMESPACE::Bool32 compareEnable = {};
VULKAN_HPP_NAMESPACE::CompareOp compareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever;
float minLod = {};
float maxLod = {};
VULKAN_HPP_NAMESPACE::BorderColor borderColor = VULKAN_HPP_NAMESPACE::BorderColor::eFloatTransparentBlack;
VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates = {};
};
template <>
struct CppType<StructureType, StructureType::eSamplerCreateInfo>
{
using Type = SamplerCreateInfo;
};
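// Usage sketch (editorial addition, not generated code): create a linear-filtering
// sampler with anisotropy through the fluent setters. `device` is a hypothetical
// vk::Device; enhanced mode with exceptions is assumed.
//
//   vk::SamplerCreateInfo samplerCreateInfo{};
//   samplerCreateInfo.setMagFilter( vk::Filter::eLinear )
//                    .setMinFilter( vk::Filter::eLinear )
//                    .setMipmapMode( vk::SamplerMipmapMode::eLinear )
//                    .setAnisotropyEnable( VK_TRUE )
//                    .setMaxAnisotropy( 16.0f )
//                    .setMaxLod( VK_LOD_CLAMP_NONE );
//   vk::Sampler sampler = device.createSampler( samplerCreateInfo );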
// wrapper struct for struct VkSamplerCubicWeightsCreateInfoQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSamplerCubicWeightsCreateInfoQCOM.html
struct SamplerCubicWeightsCreateInfoQCOM
{
using NativeType = VkSamplerCubicWeightsCreateInfoQCOM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerCubicWeightsCreateInfoQCOM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SamplerCubicWeightsCreateInfoQCOM(VULKAN_HPP_NAMESPACE::CubicFilterWeightsQCOM cubicWeights_ = VULKAN_HPP_NAMESPACE::CubicFilterWeightsQCOM::eCatmullRom, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, cubicWeights{ cubicWeights_ }
{}
VULKAN_HPP_CONSTEXPR SamplerCubicWeightsCreateInfoQCOM( SamplerCubicWeightsCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SamplerCubicWeightsCreateInfoQCOM( VkSamplerCubicWeightsCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
: SamplerCubicWeightsCreateInfoQCOM( *reinterpret_cast<SamplerCubicWeightsCreateInfoQCOM const *>( &rhs ) )
{}
SamplerCubicWeightsCreateInfoQCOM & operator=( SamplerCubicWeightsCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SamplerCubicWeightsCreateInfoQCOM & operator=( VkSamplerCubicWeightsCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerCubicWeightsCreateInfoQCOM const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 SamplerCubicWeightsCreateInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SamplerCubicWeightsCreateInfoQCOM & setCubicWeights( VULKAN_HPP_NAMESPACE::CubicFilterWeightsQCOM cubicWeights_ ) VULKAN_HPP_NOEXCEPT
{
cubicWeights = cubicWeights_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkSamplerCubicWeightsCreateInfoQCOM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSamplerCubicWeightsCreateInfoQCOM*>( this );
}
operator VkSamplerCubicWeightsCreateInfoQCOM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSamplerCubicWeightsCreateInfoQCOM*>( this );
}
operator VkSamplerCubicWeightsCreateInfoQCOM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSamplerCubicWeightsCreateInfoQCOM*>( this );
}
operator VkSamplerCubicWeightsCreateInfoQCOM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSamplerCubicWeightsCreateInfoQCOM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::CubicFilterWeightsQCOM const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, cubicWeights );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SamplerCubicWeightsCreateInfoQCOM const & ) const = default;
#else
bool operator==( SamplerCubicWeightsCreateInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( cubicWeights == rhs.cubicWeights );
#endif
}
bool operator!=( SamplerCubicWeightsCreateInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerCubicWeightsCreateInfoQCOM;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::CubicFilterWeightsQCOM cubicWeights = VULKAN_HPP_NAMESPACE::CubicFilterWeightsQCOM::eCatmullRom;
};
template <>
struct CppType<StructureType, StructureType::eSamplerCubicWeightsCreateInfoQCOM>
{
using Type = SamplerCubicWeightsCreateInfoQCOM;
};
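// Usage sketch (editorial): selecting non-default cubic filter weights by chaining this
// struct into SamplerCreateInfo::pNext. Assumes the default `vk` namespace, a valid
// vk::Device `device`, and the VK_QCOM_filter_cubic_weights extension enabled.
//
//   vk::SamplerCubicWeightsCreateInfoQCOM cubicWeights{ vk::CubicFilterWeightsQCOM::eMitchellNetravali };
//   vk::SamplerCreateInfo samplerInfo{};
//   samplerInfo.setMagFilter( vk::Filter::eCubicEXT ).setPNext( &cubicWeights );
//   vk::Sampler sampler = device.createSampler( samplerInfo );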
// wrapper struct for struct VkSamplerCustomBorderColorCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSamplerCustomBorderColorCreateInfoEXT.html
struct SamplerCustomBorderColorCreateInfoEXT
{
using NativeType = VkSamplerCustomBorderColorCreateInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerCustomBorderColorCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorCreateInfoEXT(VULKAN_HPP_NAMESPACE::ClearColorValue customBorderColor_ = {}, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, customBorderColor{ customBorderColor_ }, format{ format_ }
{}
VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorCreateInfoEXT( SamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SamplerCustomBorderColorCreateInfoEXT( VkSamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: SamplerCustomBorderColorCreateInfoEXT( *reinterpret_cast<SamplerCustomBorderColorCreateInfoEXT const *>( &rhs ) )
{}
SamplerCustomBorderColorCreateInfoEXT & operator=( SamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SamplerCustomBorderColorCreateInfoEXT & operator=( VkSamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerCustomBorderColorCreateInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorCreateInfoEXT & setCustomBorderColor( VULKAN_HPP_NAMESPACE::ClearColorValue const & customBorderColor_ ) VULKAN_HPP_NOEXCEPT
{
customBorderColor = customBorderColor_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorCreateInfoEXT & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
{
format = format_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkSamplerCustomBorderColorCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSamplerCustomBorderColorCreateInfoEXT*>( this );
}
operator VkSamplerCustomBorderColorCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSamplerCustomBorderColorCreateInfoEXT*>( this );
}
operator VkSamplerCustomBorderColorCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSamplerCustomBorderColorCreateInfoEXT*>( this );
}
operator VkSamplerCustomBorderColorCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSamplerCustomBorderColorCreateInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ClearColorValue const &, VULKAN_HPP_NAMESPACE::Format const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, customBorderColor, format );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerCustomBorderColorCreateInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ClearColorValue customBorderColor = {};
VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
};
template <>
struct CppType<StructureType, StructureType::eSamplerCustomBorderColorCreateInfoEXT>
{
using Type = SamplerCustomBorderColorCreateInfoEXT;
};
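// Note: unlike the neighboring create-info wrappers, this struct defines no comparison
// operators; customBorderColor is a ClearColorValue union, which cannot be compared
// memberwise.
// Usage sketch (editorial): supplying a custom border color for a sampler that selects
// vk::BorderColor::eFloatCustomEXT. Assumes the default `vk` namespace, a valid
// vk::Device `device`, and the VK_EXT_custom_border_color extension enabled.
//
//   vk::SamplerCustomBorderColorCreateInfoEXT customBorder{};
//   customBorder.setCustomBorderColor( vk::ClearColorValue( std::array<float, 4>{ 1.0f, 0.0f, 0.0f, 1.0f } ) )
//               .setFormat( vk::Format::eR8G8B8A8Unorm );
//   vk::SamplerCreateInfo samplerInfo{};
//   samplerInfo.setBorderColor( vk::BorderColor::eFloatCustomEXT ).setPNext( &customBorder );
//   vk::Sampler sampler = device.createSampler( samplerInfo );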
// wrapper struct for struct VkSamplerReductionModeCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSamplerReductionModeCreateInfo.html
struct SamplerReductionModeCreateInfo
{
using NativeType = VkSamplerReductionModeCreateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerReductionModeCreateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SamplerReductionModeCreateInfo(VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode_ = VULKAN_HPP_NAMESPACE::SamplerReductionMode::eWeightedAverage, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, reductionMode{ reductionMode_ }
{}
VULKAN_HPP_CONSTEXPR SamplerReductionModeCreateInfo( SamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SamplerReductionModeCreateInfo( VkSamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: SamplerReductionModeCreateInfo( *reinterpret_cast<SamplerReductionModeCreateInfo const *>( &rhs ) )
{}
SamplerReductionModeCreateInfo & operator=( SamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SamplerReductionModeCreateInfo & operator=( VkSamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerReductionModeCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 SamplerReductionModeCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SamplerReductionModeCreateInfo & setReductionMode( VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode_ ) VULKAN_HPP_NOEXCEPT
{
reductionMode = reductionMode_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkSamplerReductionModeCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSamplerReductionModeCreateInfo*>( this );
}
operator VkSamplerReductionModeCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSamplerReductionModeCreateInfo*>( this );
}
operator VkSamplerReductionModeCreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSamplerReductionModeCreateInfo*>( this );
}
operator VkSamplerReductionModeCreateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSamplerReductionModeCreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SamplerReductionMode const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, reductionMode );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SamplerReductionModeCreateInfo const & ) const = default;
#else
bool operator==( SamplerReductionModeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( reductionMode == rhs.reductionMode );
#endif
}
bool operator!=( SamplerReductionModeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerReductionModeCreateInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode = VULKAN_HPP_NAMESPACE::SamplerReductionMode::eWeightedAverage;
};
template <>
struct CppType<StructureType, StructureType::eSamplerReductionModeCreateInfo>
{
using Type = SamplerReductionModeCreateInfo;
};
using SamplerReductionModeCreateInfoEXT = SamplerReductionModeCreateInfo;
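// Usage sketch (editorial): requesting min/max reduction instead of the default weighted
// average (core in Vulkan 1.2, previously VK_EXT_sampler_filter_minmax). Assumes the
// default `vk` namespace and a valid vk::Device `device`.
//
//   vk::SamplerReductionModeCreateInfo reduction{ vk::SamplerReductionMode::eMax };
//   vk::SamplerCreateInfo samplerInfo{};
//   samplerInfo.setPNext( &reduction );
//   vk::Sampler maxSampler = device.createSampler( samplerInfo );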
// wrapper struct for struct VkSamplerYcbcrConversionCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSamplerYcbcrConversionCreateInfo.html
struct SamplerYcbcrConversionCreateInfo
{
using NativeType = VkSamplerYcbcrConversionCreateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerYcbcrConversionCreateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionCreateInfo( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
                                                       VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity,
                                                       VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull,
                                                       VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {},
                                                       VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven,
                                                       VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven,
                                                       VULKAN_HPP_NAMESPACE::Filter chromaFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest,
                                                       VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction_ = {},
                                                       const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, format{ format_ }, ycbcrModel{ ycbcrModel_ }, ycbcrRange{ ycbcrRange_ }, components{ components_ }, xChromaOffset{ xChromaOffset_ }, yChromaOffset{ yChromaOffset_ }, chromaFilter{ chromaFilter_ }, forceExplicitReconstruction{ forceExplicitReconstruction_ }
{}
VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionCreateInfo( SamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SamplerYcbcrConversionCreateInfo( VkSamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: SamplerYcbcrConversionCreateInfo( *reinterpret_cast<SamplerYcbcrConversionCreateInfo const *>( &rhs ) )
{}
SamplerYcbcrConversionCreateInfo & operator=( SamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SamplerYcbcrConversionCreateInfo & operator=( VkSamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
{
format = format_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setYcbcrModel( VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel_ ) VULKAN_HPP_NOEXCEPT
{
ycbcrModel = ycbcrModel_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setYcbcrRange( VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange_ ) VULKAN_HPP_NOEXCEPT
{
ycbcrRange = ycbcrRange_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setComponents( VULKAN_HPP_NAMESPACE::ComponentMapping const & components_ ) VULKAN_HPP_NOEXCEPT
{
components = components_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setXChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset_ ) VULKAN_HPP_NOEXCEPT
{
xChromaOffset = xChromaOffset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setYChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset_ ) VULKAN_HPP_NOEXCEPT
{
yChromaOffset = yChromaOffset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setChromaFilter( VULKAN_HPP_NAMESPACE::Filter chromaFilter_ ) VULKAN_HPP_NOEXCEPT
{
chromaFilter = chromaFilter_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setForceExplicitReconstruction( VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction_ ) VULKAN_HPP_NOEXCEPT
{
forceExplicitReconstruction = forceExplicitReconstruction_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkSamplerYcbcrConversionCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo*>( this );
}
operator VkSamplerYcbcrConversionCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSamplerYcbcrConversionCreateInfo*>( this );
}
operator VkSamplerYcbcrConversionCreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo*>( this );
}
operator VkSamplerYcbcrConversionCreateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSamplerYcbcrConversionCreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion const &, VULKAN_HPP_NAMESPACE::SamplerYcbcrRange const &, VULKAN_HPP_NAMESPACE::ComponentMapping const &, VULKAN_HPP_NAMESPACE::ChromaLocation const &, VULKAN_HPP_NAMESPACE::ChromaLocation const &, VULKAN_HPP_NAMESPACE::Filter const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, format, ycbcrModel, ycbcrRange, components, xChromaOffset, yChromaOffset, chromaFilter, forceExplicitReconstruction );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SamplerYcbcrConversionCreateInfo const & ) const = default;
#else
bool operator==( SamplerYcbcrConversionCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( format == rhs.format )
&& ( ycbcrModel == rhs.ycbcrModel )
&& ( ycbcrRange == rhs.ycbcrRange )
&& ( components == rhs.components )
&& ( xChromaOffset == rhs.xChromaOffset )
&& ( yChromaOffset == rhs.yChromaOffset )
&& ( chromaFilter == rhs.chromaFilter )
&& ( forceExplicitReconstruction == rhs.forceExplicitReconstruction );
#endif
}
bool operator!=( SamplerYcbcrConversionCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionCreateInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity;
VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull;
VULKAN_HPP_NAMESPACE::ComponentMapping components = {};
VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
VULKAN_HPP_NAMESPACE::Filter chromaFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest;
VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction = {};
};
template <>
struct CppType<StructureType, StructureType::eSamplerYcbcrConversionCreateInfo>
{
using Type = SamplerYcbcrConversionCreateInfo;
};
using SamplerYcbcrConversionCreateInfoKHR = SamplerYcbcrConversionCreateInfo;
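// Usage sketch (editorial): creating a conversion object for a multi-planar format
// (core in Vulkan 1.1 when the samplerYcbcrConversion feature is enabled). Assumes the
// default `vk` namespace and a valid vk::Device `device`.
//
//   vk::SamplerYcbcrConversionCreateInfo conversionInfo{};
//   conversionInfo.setFormat( vk::Format::eG8B8R82Plane420Unorm )
//                 .setYcbcrModel( vk::SamplerYcbcrModelConversion::eYcbcr709 )
//                 .setYcbcrRange( vk::SamplerYcbcrRange::eItuNarrow )
//                 .setChromaFilter( vk::Filter::eLinear );
//   vk::SamplerYcbcrConversion conversion = device.createSamplerYcbcrConversion( conversionInfo );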
// wrapper struct for struct VkSamplerYcbcrConversionImageFormatProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSamplerYcbcrConversionImageFormatProperties.html
struct SamplerYcbcrConversionImageFormatProperties
{
using NativeType = VkSamplerYcbcrConversionImageFormatProperties;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerYcbcrConversionImageFormatProperties;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionImageFormatProperties(uint32_t combinedImageSamplerDescriptorCount_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, combinedImageSamplerDescriptorCount{ combinedImageSamplerDescriptorCount_ }
{}
VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionImageFormatProperties( SamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SamplerYcbcrConversionImageFormatProperties( VkSamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
: SamplerYcbcrConversionImageFormatProperties( *reinterpret_cast<SamplerYcbcrConversionImageFormatProperties const *>( &rhs ) )
{}
SamplerYcbcrConversionImageFormatProperties & operator=( SamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SamplerYcbcrConversionImageFormatProperties & operator=( VkSamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatProperties const *>( &rhs );
return *this;
}
operator VkSamplerYcbcrConversionImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSamplerYcbcrConversionImageFormatProperties*>( this );
}
operator VkSamplerYcbcrConversionImageFormatProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSamplerYcbcrConversionImageFormatProperties*>( this );
}
operator VkSamplerYcbcrConversionImageFormatProperties const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSamplerYcbcrConversionImageFormatProperties*>( this );
}
operator VkSamplerYcbcrConversionImageFormatProperties *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSamplerYcbcrConversionImageFormatProperties*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, combinedImageSamplerDescriptorCount );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SamplerYcbcrConversionImageFormatProperties const & ) const = default;
#else
bool operator==( SamplerYcbcrConversionImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( combinedImageSamplerDescriptorCount == rhs.combinedImageSamplerDescriptorCount );
#endif
}
bool operator!=( SamplerYcbcrConversionImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionImageFormatProperties;
void * pNext = {};
uint32_t combinedImageSamplerDescriptorCount = {};
};
template <>
struct CppType<StructureType, StructureType::eSamplerYcbcrConversionImageFormatProperties>
{
using Type = SamplerYcbcrConversionImageFormatProperties;
};
using SamplerYcbcrConversionImageFormatPropertiesKHR = SamplerYcbcrConversionImageFormatProperties;
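// Usage sketch (editorial): querying how many combined-image-sampler descriptors a
// multi-planar format consumes, by returning this struct in a structure chain from
// getImageFormatProperties2. Assumes the default `vk` namespace and a valid
// vk::PhysicalDevice `physicalDevice`.
//
//   vk::PhysicalDeviceImageFormatInfo2 formatInfo{};
//   formatInfo.setFormat( vk::Format::eG8B8R82Plane420Unorm )
//             .setType( vk::ImageType::e2D )
//             .setTiling( vk::ImageTiling::eOptimal )
//             .setUsage( vk::ImageUsageFlagBits::eSampled );
//   auto chain = physicalDevice.getImageFormatProperties2<
//     vk::ImageFormatProperties2, vk::SamplerYcbcrConversionImageFormatProperties>( formatInfo );
//   uint32_t count = chain.get<vk::SamplerYcbcrConversionImageFormatProperties>().combinedImageSamplerDescriptorCount;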
// wrapper struct for struct VkSamplerYcbcrConversionInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSamplerYcbcrConversionInfo.html
struct SamplerYcbcrConversionInfo
{
using NativeType = VkSamplerYcbcrConversionInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerYcbcrConversionInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionInfo(VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, conversion{ conversion_ }
{}
VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionInfo( SamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SamplerYcbcrConversionInfo( VkSamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: SamplerYcbcrConversionInfo( *reinterpret_cast<SamplerYcbcrConversionInfo const *>( &rhs ) )
{}
SamplerYcbcrConversionInfo & operator=( SamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SamplerYcbcrConversionInfo & operator=( VkSamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionInfo & setConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion_ ) VULKAN_HPP_NOEXCEPT
{
conversion = conversion_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkSamplerYcbcrConversionInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSamplerYcbcrConversionInfo*>( this );
}
operator VkSamplerYcbcrConversionInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSamplerYcbcrConversionInfo*>( this );
}
operator VkSamplerYcbcrConversionInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSamplerYcbcrConversionInfo*>( this );
}
operator VkSamplerYcbcrConversionInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSamplerYcbcrConversionInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, conversion );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SamplerYcbcrConversionInfo const & ) const = default;
#else
bool operator==( SamplerYcbcrConversionInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( conversion == rhs.conversion );
#endif
}
bool operator!=( SamplerYcbcrConversionInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion = {};
};
template <>
struct CppType<StructureType, StructureType::eSamplerYcbcrConversionInfo>
{
using Type = SamplerYcbcrConversionInfo;
};
using SamplerYcbcrConversionInfoKHR = SamplerYcbcrConversionInfo;
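// Usage sketch (editorial): attaching a previously created conversion to both the image
// view and the sampler that samples it; the same handle must appear in both pNext
// chains. Assumes the default `vk` namespace and a vk::SamplerYcbcrConversion
// `conversion` created as shown above.
//
//   vk::SamplerYcbcrConversionInfo conversionRef{ conversion };
//   vk::ImageViewCreateInfo viewInfo{};      // plus image, format, subresource range, ...
//   viewInfo.setPNext( &conversionRef );
//   vk::SamplerCreateInfo samplerInfo{};
//   samplerInfo.setPNext( &conversionRef );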
// wrapper struct for struct VkSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM.html
struct SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM
{
using NativeType = VkSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM(VULKAN_HPP_NAMESPACE::Bool32 enableYDegamma_ = {}, VULKAN_HPP_NAMESPACE::Bool32 enableCbCrDegamma_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, enableYDegamma{ enableYDegamma_ }, enableCbCrDegamma{ enableCbCrDegamma_ }
{}
VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM( SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM( VkSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
: SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM( *reinterpret_cast<SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM const *>( &rhs ) )
{}
SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM & operator=( SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM & operator=( VkSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM & setEnableYDegamma( VULKAN_HPP_NAMESPACE::Bool32 enableYDegamma_ ) VULKAN_HPP_NOEXCEPT
{
enableYDegamma = enableYDegamma_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM & setEnableCbCrDegamma( VULKAN_HPP_NAMESPACE::Bool32 enableCbCrDegamma_ ) VULKAN_HPP_NOEXCEPT
{
enableCbCrDegamma = enableCbCrDegamma_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM*>( this );
}
operator VkSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM*>( this );
}
operator VkSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM*>( this );
}
operator VkSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, enableYDegamma, enableCbCrDegamma );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM const & ) const = default;
#else
bool operator==( SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( enableYDegamma == rhs.enableYDegamma )
&& ( enableCbCrDegamma == rhs.enableCbCrDegamma );
#endif
}
bool operator!=( SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 enableYDegamma = {};
VULKAN_HPP_NAMESPACE::Bool32 enableCbCrDegamma = {};
};
template <>
struct CppType<StructureType, StructureType::eSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM>
{
using Type = SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM;
};
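// Usage sketch (editorial): enabling sRGB degamma on the Y plane only, by chaining this
// struct into SamplerYcbcrConversionCreateInfo::pNext. Assumes the default `vk`
// namespace and the VK_QCOM_ycbcr_degamma extension enabled.
//
//   vk::SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM degamma{ VK_TRUE, VK_FALSE };
//   vk::SamplerYcbcrConversionCreateInfo conversionInfo{};
//   conversionInfo.setPNext( &degamma );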
#if defined( VK_USE_PLATFORM_SCREEN_QNX )
// wrapper struct for struct VkScreenBufferFormatPropertiesQNX, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkScreenBufferFormatPropertiesQNX.html
struct ScreenBufferFormatPropertiesQNX
{
using NativeType = VkScreenBufferFormatPropertiesQNX;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eScreenBufferFormatPropertiesQNX;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ScreenBufferFormatPropertiesQNX( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
                                                      uint64_t externalFormat_ = {},
                                                      uint64_t screenUsage_ = {},
                                                      VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures_ = {},
                                                      VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents_ = {},
                                                      VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity,
                                                      VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull,
                                                      VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven,
                                                      VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven,
                                                      void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, format{ format_ }, externalFormat{ externalFormat_ }, screenUsage{ screenUsage_ }, formatFeatures{ formatFeatures_ }, samplerYcbcrConversionComponents{ samplerYcbcrConversionComponents_ }, suggestedYcbcrModel{ suggestedYcbcrModel_ }, suggestedYcbcrRange{ suggestedYcbcrRange_ }, suggestedXChromaOffset{ suggestedXChromaOffset_ }, suggestedYChromaOffset{ suggestedYChromaOffset_ }
{}
VULKAN_HPP_CONSTEXPR ScreenBufferFormatPropertiesQNX( ScreenBufferFormatPropertiesQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ScreenBufferFormatPropertiesQNX( VkScreenBufferFormatPropertiesQNX const & rhs ) VULKAN_HPP_NOEXCEPT
: ScreenBufferFormatPropertiesQNX( *reinterpret_cast<ScreenBufferFormatPropertiesQNX const *>( &rhs ) )
{}
ScreenBufferFormatPropertiesQNX & operator=( ScreenBufferFormatPropertiesQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ScreenBufferFormatPropertiesQNX & operator=( VkScreenBufferFormatPropertiesQNX const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ScreenBufferFormatPropertiesQNX const *>( &rhs );
return *this;
}
operator VkScreenBufferFormatPropertiesQNX const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkScreenBufferFormatPropertiesQNX*>( this );
}
operator VkScreenBufferFormatPropertiesQNX &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkScreenBufferFormatPropertiesQNX*>( this );
}
operator VkScreenBufferFormatPropertiesQNX const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkScreenBufferFormatPropertiesQNX*>( this );
}
operator VkScreenBufferFormatPropertiesQNX *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkScreenBufferFormatPropertiesQNX*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Format const &, uint64_t const &, uint64_t const &, VULKAN_HPP_NAMESPACE::FormatFeatureFlags const &, VULKAN_HPP_NAMESPACE::ComponentMapping const &, VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion const &, VULKAN_HPP_NAMESPACE::SamplerYcbcrRange const &, VULKAN_HPP_NAMESPACE::ChromaLocation const &, VULKAN_HPP_NAMESPACE::ChromaLocation const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, format, externalFormat, screenUsage, formatFeatures, samplerYcbcrConversionComponents, suggestedYcbcrModel, suggestedYcbcrRange, suggestedXChromaOffset, suggestedYChromaOffset );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ScreenBufferFormatPropertiesQNX const & ) const = default;
#else
bool operator==( ScreenBufferFormatPropertiesQNX const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( format == rhs.format )
&& ( externalFormat == rhs.externalFormat )
&& ( screenUsage == rhs.screenUsage )
&& ( formatFeatures == rhs.formatFeatures )
&& ( samplerYcbcrConversionComponents == rhs.samplerYcbcrConversionComponents )
&& ( suggestedYcbcrModel == rhs.suggestedYcbcrModel )
&& ( suggestedYcbcrRange == rhs.suggestedYcbcrRange )
&& ( suggestedXChromaOffset == rhs.suggestedXChromaOffset )
&& ( suggestedYChromaOffset == rhs.suggestedYChromaOffset );
#endif
}
bool operator!=( ScreenBufferFormatPropertiesQNX const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eScreenBufferFormatPropertiesQNX;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
uint64_t externalFormat = {};
uint64_t screenUsage = {};
VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures = {};
VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents = {};
VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity;
VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull;
VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
};
template <>
struct CppType<StructureType, StructureType::eScreenBufferFormatPropertiesQNX>
{
using Type = ScreenBufferFormatPropertiesQNX;
};
#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
#if defined( VK_USE_PLATFORM_SCREEN_QNX )
// wrapper struct for struct VkScreenBufferPropertiesQNX, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkScreenBufferPropertiesQNX.html
struct ScreenBufferPropertiesQNX
{
using NativeType = VkScreenBufferPropertiesQNX;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eScreenBufferPropertiesQNX;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ScreenBufferPropertiesQNX(VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ = {}, uint32_t memoryTypeBits_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, allocationSize{ allocationSize_ }, memoryTypeBits{ memoryTypeBits_ }
{}
VULKAN_HPP_CONSTEXPR ScreenBufferPropertiesQNX( ScreenBufferPropertiesQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ScreenBufferPropertiesQNX( VkScreenBufferPropertiesQNX const & rhs ) VULKAN_HPP_NOEXCEPT
: ScreenBufferPropertiesQNX( *reinterpret_cast<ScreenBufferPropertiesQNX const *>( &rhs ) )
{}
ScreenBufferPropertiesQNX & operator=( ScreenBufferPropertiesQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ScreenBufferPropertiesQNX & operator=( VkScreenBufferPropertiesQNX const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX const *>( &rhs );
return *this;
}
operator VkScreenBufferPropertiesQNX const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkScreenBufferPropertiesQNX*>( this );
}
operator VkScreenBufferPropertiesQNX &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkScreenBufferPropertiesQNX*>( this );
}
operator VkScreenBufferPropertiesQNX const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkScreenBufferPropertiesQNX*>( this );
}
operator VkScreenBufferPropertiesQNX *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkScreenBufferPropertiesQNX*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, allocationSize, memoryTypeBits );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ScreenBufferPropertiesQNX const & ) const = default;
#else
bool operator==( ScreenBufferPropertiesQNX const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( allocationSize == rhs.allocationSize )
&& ( memoryTypeBits == rhs.memoryTypeBits );
#endif
}
bool operator!=( ScreenBufferPropertiesQNX const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eScreenBufferPropertiesQNX;
void * pNext = {};
VULKAN_HPP_NAMESPACE::DeviceSize allocationSize = {};
uint32_t memoryTypeBits = {};
};
template <>
struct CppType<StructureType, StructureType::eScreenBufferPropertiesQNX>
{
using Type = ScreenBufferPropertiesQNX;
};
#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
#if defined( VK_USE_PLATFORM_SCREEN_QNX )
// wrapper struct for struct VkScreenSurfaceCreateInfoQNX, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkScreenSurfaceCreateInfoQNX.html
struct ScreenSurfaceCreateInfoQNX
{
using NativeType = VkScreenSurfaceCreateInfoQNX;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eScreenSurfaceCreateInfoQNX;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ScreenSurfaceCreateInfoQNX(VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateFlagsQNX flags_ = {}, struct _screen_context * context_ = {}, struct _screen_window * window_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, context{ context_ }, window{ window_ }
{}
VULKAN_HPP_CONSTEXPR ScreenSurfaceCreateInfoQNX( ScreenSurfaceCreateInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ScreenSurfaceCreateInfoQNX( VkScreenSurfaceCreateInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT
: ScreenSurfaceCreateInfoQNX( *reinterpret_cast<ScreenSurfaceCreateInfoQNX const *>( &rhs ) )
{}
ScreenSurfaceCreateInfoQNX & operator=( ScreenSurfaceCreateInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ScreenSurfaceCreateInfoQNX & operator=( VkScreenSurfaceCreateInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ScreenSurfaceCreateInfoQNX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ScreenSurfaceCreateInfoQNX & setFlags( VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateFlagsQNX flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ScreenSurfaceCreateInfoQNX & setContext( struct _screen_context * context_ ) VULKAN_HPP_NOEXCEPT
{
context = context_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ScreenSurfaceCreateInfoQNX & setWindow( struct _screen_window * window_ ) VULKAN_HPP_NOEXCEPT
{
window = window_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkScreenSurfaceCreateInfoQNX const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkScreenSurfaceCreateInfoQNX*>( this );
}
operator VkScreenSurfaceCreateInfoQNX &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkScreenSurfaceCreateInfoQNX*>( this );
}
operator VkScreenSurfaceCreateInfoQNX const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkScreenSurfaceCreateInfoQNX*>( this );
}
operator VkScreenSurfaceCreateInfoQNX *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkScreenSurfaceCreateInfoQNX*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateFlagsQNX const &, struct _screen_context * const &, struct _screen_window * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, context, window );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ScreenSurfaceCreateInfoQNX const & ) const = default;
#else
bool operator==( ScreenSurfaceCreateInfoQNX const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( context == rhs.context )
&& ( window == rhs.window );
#endif
}
bool operator!=( ScreenSurfaceCreateInfoQNX const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eScreenSurfaceCreateInfoQNX;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateFlagsQNX flags = {};
struct _screen_context * context = {};
struct _screen_window * window = {};
};
template <>
struct CppType<StructureType, StructureType::eScreenSurfaceCreateInfoQNX>
{
using Type = ScreenSurfaceCreateInfoQNX;
};
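// Usage sketch (editorial): creating a QNX screen surface. Assumes the default `vk`
// namespace, a valid vk::Instance `instance`, the VK_QNX_screen_surface extension
// enabled, and `_screen_context` / `_screen_window` pointers `ctx` / `win` obtained
// from the QNX Screen API.
//
//   vk::ScreenSurfaceCreateInfoQNX surfaceInfo{};
//   surfaceInfo.setContext( ctx ).setWindow( win );
//   vk::SurfaceKHR surface = instance.createScreenSurfaceQNX( surfaceInfo );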
#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
// wrapper struct for struct VkSemaphoreCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSemaphoreCreateInfo.html
struct SemaphoreCreateInfo
{
using NativeType = VkSemaphoreCreateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreCreateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SemaphoreCreateInfo(VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }
{}
VULKAN_HPP_CONSTEXPR SemaphoreCreateInfo( SemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SemaphoreCreateInfo( VkSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: SemaphoreCreateInfo( *reinterpret_cast<SemaphoreCreateInfo const *>( &rhs ) )
{}
SemaphoreCreateInfo & operator=( SemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SemaphoreCreateInfo & operator=( VkSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 SemaphoreCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SemaphoreCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkSemaphoreCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSemaphoreCreateInfo*>( this );
}
operator VkSemaphoreCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSemaphoreCreateInfo*>( this );
}
operator VkSemaphoreCreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSemaphoreCreateInfo*>( this );
}
operator VkSemaphoreCreateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSemaphoreCreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SemaphoreCreateInfo const & ) const = default;
#else
bool operator==( SemaphoreCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags );
#endif
}
bool operator!=( SemaphoreCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreCreateInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags = {};
};
template <>
struct CppType<StructureType, StructureType::eSemaphoreCreateInfo>
{
using Type = SemaphoreCreateInfo;
};
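// Usage sketch (editorial): creating a binary semaphore, and a timeline semaphore by
// chaining vk::SemaphoreTypeCreateInfo (core in Vulkan 1.2). Assumes the default `vk`
// namespace and a valid vk::Device `device`.
//
//   vk::Semaphore binary = device.createSemaphore( vk::SemaphoreCreateInfo{} );
//   vk::SemaphoreTypeCreateInfo typeInfo{ vk::SemaphoreType::eTimeline, 0 };
//   vk::SemaphoreCreateInfo timelineInfo{};
//   timelineInfo.setPNext( &typeInfo );
//   vk::Semaphore timeline = device.createSemaphore( timelineInfo );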
// wrapper struct for struct VkSemaphoreGetFdInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSemaphoreGetFdInfoKHR.html
struct SemaphoreGetFdInfoKHR
{
using NativeType = VkSemaphoreGetFdInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreGetFdInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SemaphoreGetFdInfoKHR(VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, semaphore{ semaphore_ }, handleType{ handleType_ }
{}
VULKAN_HPP_CONSTEXPR SemaphoreGetFdInfoKHR( SemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SemaphoreGetFdInfoKHR( VkSemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: SemaphoreGetFdInfoKHR( *reinterpret_cast<SemaphoreGetFdInfoKHR const *>( &rhs ) )
{}
SemaphoreGetFdInfoKHR & operator=( SemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SemaphoreGetFdInfoKHR & operator=( VkSemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 SemaphoreGetFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SemaphoreGetFdInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
{
semaphore = semaphore_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SemaphoreGetFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkSemaphoreGetFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSemaphoreGetFdInfoKHR*>( this );
}
operator VkSemaphoreGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSemaphoreGetFdInfoKHR*>( this );
}
operator VkSemaphoreGetFdInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSemaphoreGetFdInfoKHR*>( this );
}
operator VkSemaphoreGetFdInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSemaphoreGetFdInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Semaphore const &, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, semaphore, handleType );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SemaphoreGetFdInfoKHR const & ) const = default;
#else
bool operator==( SemaphoreGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( semaphore == rhs.semaphore )
&& ( handleType == rhs.handleType );
#endif
}
bool operator!=( SemaphoreGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetFdInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
};
template <>
struct CppType<StructureType, StructureType::eSemaphoreGetFdInfoKHR>
{
using Type = SemaphoreGetFdInfoKHR;
};
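// Usage sketch (editorial): exporting a POSIX fd from a semaphore that was created with
// vk::ExportSemaphoreCreateInfo in its pNext chain. Assumes the default `vk` namespace,
// a valid vk::Device `device`, a vk::Semaphore `semaphore`, and the
// VK_KHR_external_semaphore_fd extension enabled with its entry points loaded. The
// Win32 and Zircon variants below mirror this pattern with their own handle types.
//
//   vk::SemaphoreGetFdInfoKHR getFdInfo{ semaphore, vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd };
//   int fd = device.getSemaphoreFdKHR( getFdInfo );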
#if defined( VK_USE_PLATFORM_WIN32_KHR )
// wrapper struct for struct VkSemaphoreGetWin32HandleInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSemaphoreGetWin32HandleInfoKHR.html
struct SemaphoreGetWin32HandleInfoKHR
{
using NativeType = VkSemaphoreGetWin32HandleInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreGetWin32HandleInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SemaphoreGetWin32HandleInfoKHR(VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, semaphore{ semaphore_ }, handleType{ handleType_ }
{}
VULKAN_HPP_CONSTEXPR SemaphoreGetWin32HandleInfoKHR( SemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SemaphoreGetWin32HandleInfoKHR( VkSemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: SemaphoreGetWin32HandleInfoKHR( *reinterpret_cast<SemaphoreGetWin32HandleInfoKHR const *>( &rhs ) )
{}
SemaphoreGetWin32HandleInfoKHR & operator=( SemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SemaphoreGetWin32HandleInfoKHR & operator=( VkSemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 SemaphoreGetWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SemaphoreGetWin32HandleInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
{
semaphore = semaphore_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SemaphoreGetWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkSemaphoreGetWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR*>( this );
}
operator VkSemaphoreGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSemaphoreGetWin32HandleInfoKHR*>( this );
}
operator VkSemaphoreGetWin32HandleInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR*>( this );
}
operator VkSemaphoreGetWin32HandleInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSemaphoreGetWin32HandleInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Semaphore const &, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, semaphore, handleType );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SemaphoreGetWin32HandleInfoKHR const & ) const = default;
#else
bool operator==( SemaphoreGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( semaphore == rhs.semaphore )
&& ( handleType == rhs.handleType );
#endif
}
bool operator!=( SemaphoreGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetWin32HandleInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
};
template <>
struct CppType<StructureType, StructureType::eSemaphoreGetWin32HandleInfoKHR>
{
using Type = SemaphoreGetWin32HandleInfoKHR;
};
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#if defined( VK_USE_PLATFORM_FUCHSIA )
// wrapper struct for struct VkSemaphoreGetZirconHandleInfoFUCHSIA, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSemaphoreGetZirconHandleInfoFUCHSIA.html
struct SemaphoreGetZirconHandleInfoFUCHSIA
{
using NativeType = VkSemaphoreGetZirconHandleInfoFUCHSIA;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreGetZirconHandleInfoFUCHSIA;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SemaphoreGetZirconHandleInfoFUCHSIA(VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, semaphore{ semaphore_ }, handleType{ handleType_ }
{}
VULKAN_HPP_CONSTEXPR SemaphoreGetZirconHandleInfoFUCHSIA( SemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SemaphoreGetZirconHandleInfoFUCHSIA( VkSemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
: SemaphoreGetZirconHandleInfoFUCHSIA( *reinterpret_cast<SemaphoreGetZirconHandleInfoFUCHSIA const *>( &rhs ) )
{}
SemaphoreGetZirconHandleInfoFUCHSIA & operator=( SemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SemaphoreGetZirconHandleInfoFUCHSIA & operator=( VkSemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 SemaphoreGetZirconHandleInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SemaphoreGetZirconHandleInfoFUCHSIA & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
{
semaphore = semaphore_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SemaphoreGetZirconHandleInfoFUCHSIA & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkSemaphoreGetZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSemaphoreGetZirconHandleInfoFUCHSIA*>( this );
}
operator VkSemaphoreGetZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSemaphoreGetZirconHandleInfoFUCHSIA*>( this );
}
operator VkSemaphoreGetZirconHandleInfoFUCHSIA const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSemaphoreGetZirconHandleInfoFUCHSIA*>( this );
}
operator VkSemaphoreGetZirconHandleInfoFUCHSIA *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSemaphoreGetZirconHandleInfoFUCHSIA*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Semaphore const &, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, semaphore, handleType );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SemaphoreGetZirconHandleInfoFUCHSIA const & ) const = default;
#else
bool operator==( SemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( semaphore == rhs.semaphore )
&& ( handleType == rhs.handleType );
#endif
}
bool operator!=( SemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetZirconHandleInfoFUCHSIA;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
};
template <>
struct CppType<StructureType, StructureType::eSemaphoreGetZirconHandleInfoFUCHSIA>
{
using Type = SemaphoreGetZirconHandleInfoFUCHSIA;
};
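// Usage sketch (illustrative only, not part of the generated API): exporting a
// semaphore payload as a Zircon event handle, assuming the default vk:: namespace,
// a valid vk::Device `device`, and a vk::Semaphore `semaphore` created with
// Zircon-event handle export enabled.
//
//   vk::SemaphoreGetZirconHandleInfoFUCHSIA getHandleInfo(
//     semaphore, vk::ExternalSemaphoreHandleTypeFlagBits::eZirconEventFUCHSIA );
//   zx_handle_t zirconHandle = device.getSemaphoreZirconHandleFUCHSIA( getHandleInfo );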
#endif /*VK_USE_PLATFORM_FUCHSIA*/
// wrapper struct for struct VkSemaphoreSignalInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSemaphoreSignalInfo.html
struct SemaphoreSignalInfo
{
using NativeType = VkSemaphoreSignalInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreSignalInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SemaphoreSignalInfo(VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, uint64_t value_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, semaphore{ semaphore_ }, value{ value_ }
{}
VULKAN_HPP_CONSTEXPR SemaphoreSignalInfo( SemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SemaphoreSignalInfo( VkSemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: SemaphoreSignalInfo( *reinterpret_cast<SemaphoreSignalInfo const *>( &rhs ) )
{}
SemaphoreSignalInfo & operator=( SemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SemaphoreSignalInfo & operator=( VkSemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 SemaphoreSignalInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SemaphoreSignalInfo & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
{
semaphore = semaphore_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SemaphoreSignalInfo & setValue( uint64_t value_ ) VULKAN_HPP_NOEXCEPT
{
value = value_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkSemaphoreSignalInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSemaphoreSignalInfo*>( this );
}
operator VkSemaphoreSignalInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSemaphoreSignalInfo*>( this );
}
operator VkSemaphoreSignalInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSemaphoreSignalInfo*>( this );
}
operator VkSemaphoreSignalInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSemaphoreSignalInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Semaphore const &, uint64_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, semaphore, value );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SemaphoreSignalInfo const & ) const = default;
#else
bool operator==( SemaphoreSignalInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( semaphore == rhs.semaphore )
&& ( value == rhs.value );
#endif
}
bool operator!=( SemaphoreSignalInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreSignalInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
uint64_t value = {};
};
template <>
struct CppType<StructureType, StructureType::eSemaphoreSignalInfo>
{
using Type = SemaphoreSignalInfo;
};
using SemaphoreSignalInfoKHR = SemaphoreSignalInfo;
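// Usage sketch (illustrative only): host-side signalling of a timeline semaphore,
// assuming a valid vk::Device `device` and timeline vk::Semaphore `timeline`.
//
//   vk::SemaphoreSignalInfo signalInfo( timeline, 42 );  // value must exceed the current counter
//   device.signalSemaphore( signalInfo );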
// wrapper struct for struct VkSemaphoreTypeCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSemaphoreTypeCreateInfo.html
struct SemaphoreTypeCreateInfo
{
using NativeType = VkSemaphoreTypeCreateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreTypeCreateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SemaphoreTypeCreateInfo(VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType_ = VULKAN_HPP_NAMESPACE::SemaphoreType::eBinary, uint64_t initialValue_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, semaphoreType{ semaphoreType_ }, initialValue{ initialValue_ }
{}
VULKAN_HPP_CONSTEXPR SemaphoreTypeCreateInfo( SemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SemaphoreTypeCreateInfo( VkSemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: SemaphoreTypeCreateInfo( *reinterpret_cast<SemaphoreTypeCreateInfo const *>( &rhs ) )
{}
SemaphoreTypeCreateInfo & operator=( SemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SemaphoreTypeCreateInfo & operator=( VkSemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 SemaphoreTypeCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SemaphoreTypeCreateInfo & setSemaphoreType( VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType_ ) VULKAN_HPP_NOEXCEPT
{
semaphoreType = semaphoreType_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SemaphoreTypeCreateInfo & setInitialValue( uint64_t initialValue_ ) VULKAN_HPP_NOEXCEPT
{
initialValue = initialValue_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkSemaphoreTypeCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSemaphoreTypeCreateInfo*>( this );
}
operator VkSemaphoreTypeCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSemaphoreTypeCreateInfo*>( this );
}
operator VkSemaphoreTypeCreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSemaphoreTypeCreateInfo*>( this );
}
operator VkSemaphoreTypeCreateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSemaphoreTypeCreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SemaphoreType const &, uint64_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, semaphoreType, initialValue );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SemaphoreTypeCreateInfo const & ) const = default;
#else
bool operator==( SemaphoreTypeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( semaphoreType == rhs.semaphoreType )
&& ( initialValue == rhs.initialValue );
#endif
}
bool operator!=( SemaphoreTypeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreTypeCreateInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType = VULKAN_HPP_NAMESPACE::SemaphoreType::eBinary;
uint64_t initialValue = {};
};
template <>
struct CppType<StructureType, StructureType::eSemaphoreTypeCreateInfo>
{
using Type = SemaphoreTypeCreateInfo;
};
using SemaphoreTypeCreateInfoKHR = SemaphoreTypeCreateInfo;
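// Usage sketch (illustrative only): creating a timeline semaphore by chaining this
// struct into vk::SemaphoreCreateInfo::pNext, assuming a valid vk::Device `device`.
//
//   vk::SemaphoreTypeCreateInfo typeInfo( vk::SemaphoreType::eTimeline, /*initialValue=*/0 );
//   vk::SemaphoreCreateInfo createInfo( {}, &typeInfo );
//   vk::Semaphore timeline = device.createSemaphore( createInfo );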
// wrapper struct for struct VkSemaphoreWaitInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSemaphoreWaitInfo.html
struct SemaphoreWaitInfo
{
using NativeType = VkSemaphoreWaitInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreWaitInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SemaphoreWaitInfo(VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_ = {}, uint32_t semaphoreCount_ = {}, const VULKAN_HPP_NAMESPACE::Semaphore * pSemaphores_ = {}, const uint64_t * pValues_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, semaphoreCount{ semaphoreCount_ }, pSemaphores{ pSemaphores_ }, pValues{ pValues_ }
{}
VULKAN_HPP_CONSTEXPR SemaphoreWaitInfo( SemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SemaphoreWaitInfo( VkSemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: SemaphoreWaitInfo( *reinterpret_cast<SemaphoreWaitInfo const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
SemaphoreWaitInfo( VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & semaphores_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & values_ = {}, const void * pNext_ = nullptr )
: pNext( pNext_ ), flags( flags_ ), semaphoreCount( static_cast<uint32_t>( semaphores_.size() ) ), pSemaphores( semaphores_.data() ), pValues( values_.data() )
{
#ifdef VULKAN_HPP_NO_EXCEPTIONS
VULKAN_HPP_ASSERT( semaphores_.size() == values_.size() );
#else
if ( semaphores_.size() != values_.size() )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::SemaphoreWaitInfo::SemaphoreWaitInfo: semaphores_.size() != values_.size()" );
}
#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
SemaphoreWaitInfo & operator=( SemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SemaphoreWaitInfo & operator=( VkSemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo & setSemaphoreCount( uint32_t semaphoreCount_ ) VULKAN_HPP_NOEXCEPT
{
semaphoreCount = semaphoreCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo & setPSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pSemaphores_ ) VULKAN_HPP_NOEXCEPT
{
pSemaphores = pSemaphores_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
SemaphoreWaitInfo & setSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & semaphores_ ) VULKAN_HPP_NOEXCEPT
{
semaphoreCount = static_cast<uint32_t>( semaphores_.size() );
pSemaphores = semaphores_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo & setPValues( const uint64_t * pValues_ ) VULKAN_HPP_NOEXCEPT
{
pValues = pValues_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
SemaphoreWaitInfo & setValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & values_ ) VULKAN_HPP_NOEXCEPT
{
semaphoreCount = static_cast<uint32_t>( values_.size() );
pValues = values_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkSemaphoreWaitInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSemaphoreWaitInfo*>( this );
}
operator VkSemaphoreWaitInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSemaphoreWaitInfo*>( this );
}
operator VkSemaphoreWaitInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSemaphoreWaitInfo*>( this );
}
operator VkSemaphoreWaitInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSemaphoreWaitInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Semaphore * const &, const uint64_t * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, semaphoreCount, pSemaphores, pValues );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SemaphoreWaitInfo const & ) const = default;
#else
bool operator==( SemaphoreWaitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( semaphoreCount == rhs.semaphoreCount )
&& ( pSemaphores == rhs.pSemaphores )
&& ( pValues == rhs.pValues );
#endif
}
bool operator!=( SemaphoreWaitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreWaitInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags = {};
uint32_t semaphoreCount = {};
const VULKAN_HPP_NAMESPACE::Semaphore * pSemaphores = {};
const uint64_t * pValues = {};
};
template <>
struct CppType<StructureType, StructureType::eSemaphoreWaitInfo>
{
using Type = SemaphoreWaitInfo;
};
using SemaphoreWaitInfoKHR = SemaphoreWaitInfo;
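// Usage sketch (illustrative only): blocking until a timeline semaphore reaches a
// value, using the enhanced-mode constructor with single lvalue elements standing
// in for the arrays. Assumes a valid vk::Device `device` and timeline vk::Semaphore
// `timeline`.
//
//   uint64_t waitValue = 42;
//   vk::SemaphoreWaitInfo waitInfo( {}, timeline, waitValue );
//   vk::Result result = device.waitSemaphores( waitInfo, UINT64_MAX );  // eSuccess or eTimeout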
// wrapper struct for struct VkSetDescriptorBufferOffsetsInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSetDescriptorBufferOffsetsInfoEXT.html
struct SetDescriptorBufferOffsetsInfoEXT
{
using NativeType = VkSetDescriptorBufferOffsetsInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSetDescriptorBufferOffsetsInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SetDescriptorBufferOffsetsInfoEXT(VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, uint32_t firstSet_ = {}, uint32_t setCount_ = {}, const uint32_t * pBufferIndices_ = {}, const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, stageFlags{ stageFlags_ }, layout{ layout_ }, firstSet{ firstSet_ }, setCount{ setCount_ }, pBufferIndices{ pBufferIndices_ }, pOffsets{ pOffsets_ }
{}
VULKAN_HPP_CONSTEXPR SetDescriptorBufferOffsetsInfoEXT( SetDescriptorBufferOffsetsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SetDescriptorBufferOffsetsInfoEXT( VkSetDescriptorBufferOffsetsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: SetDescriptorBufferOffsetsInfoEXT( *reinterpret_cast<SetDescriptorBufferOffsetsInfoEXT const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
SetDescriptorBufferOffsetsInfoEXT( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_, VULKAN_HPP_NAMESPACE::PipelineLayout layout_, uint32_t firstSet_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & bufferIndices_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets_ = {}, const void * pNext_ = nullptr )
: pNext( pNext_ ), stageFlags( stageFlags_ ), layout( layout_ ), firstSet( firstSet_ ), setCount( static_cast<uint32_t>( bufferIndices_.size() ) ), pBufferIndices( bufferIndices_.data() ), pOffsets( offsets_.data() )
{
#ifdef VULKAN_HPP_NO_EXCEPTIONS
VULKAN_HPP_ASSERT( bufferIndices_.size() == offsets_.size() );
#else
if ( bufferIndices_.size() != offsets_.size() )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::SetDescriptorBufferOffsetsInfoEXT::SetDescriptorBufferOffsetsInfoEXT: bufferIndices_.size() != offsets_.size()" );
}
#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
SetDescriptorBufferOffsetsInfoEXT & operator=( SetDescriptorBufferOffsetsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SetDescriptorBufferOffsetsInfoEXT & operator=( VkSetDescriptorBufferOffsetsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SetDescriptorBufferOffsetsInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 SetDescriptorBufferOffsetsInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SetDescriptorBufferOffsetsInfoEXT & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT
{
stageFlags = stageFlags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SetDescriptorBufferOffsetsInfoEXT & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
{
layout = layout_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SetDescriptorBufferOffsetsInfoEXT & setFirstSet( uint32_t firstSet_ ) VULKAN_HPP_NOEXCEPT
{
firstSet = firstSet_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SetDescriptorBufferOffsetsInfoEXT & setSetCount( uint32_t setCount_ ) VULKAN_HPP_NOEXCEPT
{
setCount = setCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SetDescriptorBufferOffsetsInfoEXT & setPBufferIndices( const uint32_t * pBufferIndices_ ) VULKAN_HPP_NOEXCEPT
{
pBufferIndices = pBufferIndices_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
SetDescriptorBufferOffsetsInfoEXT & setBufferIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & bufferIndices_ ) VULKAN_HPP_NOEXCEPT
{
setCount = static_cast<uint32_t>( bufferIndices_.size() );
pBufferIndices = bufferIndices_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 SetDescriptorBufferOffsetsInfoEXT & setPOffsets( const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets_ ) VULKAN_HPP_NOEXCEPT
{
pOffsets = pOffsets_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
SetDescriptorBufferOffsetsInfoEXT & setOffsets( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets_ ) VULKAN_HPP_NOEXCEPT
{
setCount = static_cast<uint32_t>( offsets_.size() );
pOffsets = offsets_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkSetDescriptorBufferOffsetsInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSetDescriptorBufferOffsetsInfoEXT*>( this );
}
operator VkSetDescriptorBufferOffsetsInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSetDescriptorBufferOffsetsInfoEXT*>( this );
}
operator VkSetDescriptorBufferOffsetsInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSetDescriptorBufferOffsetsInfoEXT*>( this );
}
operator VkSetDescriptorBufferOffsetsInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSetDescriptorBufferOffsetsInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ShaderStageFlags const &, VULKAN_HPP_NAMESPACE::PipelineLayout const &, uint32_t const &, uint32_t const &, const uint32_t * const &, const VULKAN_HPP_NAMESPACE::DeviceSize * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, stageFlags, layout, firstSet, setCount, pBufferIndices, pOffsets );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SetDescriptorBufferOffsetsInfoEXT const & ) const = default;
#else
bool operator==( SetDescriptorBufferOffsetsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( stageFlags == rhs.stageFlags )
&& ( layout == rhs.layout )
&& ( firstSet == rhs.firstSet )
&& ( setCount == rhs.setCount )
&& ( pBufferIndices == rhs.pBufferIndices )
&& ( pOffsets == rhs.pOffsets );
#endif
}
bool operator!=( SetDescriptorBufferOffsetsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSetDescriptorBufferOffsetsInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {};
VULKAN_HPP_NAMESPACE::PipelineLayout layout = {};
uint32_t firstSet = {};
uint32_t setCount = {};
const uint32_t * pBufferIndices = {};
const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets = {};
};
template <>
struct CppType<StructureType, StructureType::eSetDescriptorBufferOffsetsInfoEXT>
{
using Type = SetDescriptorBufferOffsetsInfoEXT;
};
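// Usage sketch (illustrative only, assuming VK_EXT_descriptor_buffer plus
// VK_KHR_maintenance6 are enabled, with a valid vk::CommandBuffer `commandBuffer`
// and vk::PipelineLayout `layout`):
//
//   uint32_t bufferIndex = 0;   // index into the bound descriptor buffers
//   vk::DeviceSize offset = 0;  // offset of set 0 within that buffer
//   vk::SetDescriptorBufferOffsetsInfoEXT offsetsInfo(
//     vk::ShaderStageFlagBits::eCompute, layout, /*firstSet=*/0, bufferIndex, offset );
//   commandBuffer.setDescriptorBufferOffsets2EXT( offsetsInfo );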
// wrapper struct for struct VkSetLatencyMarkerInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSetLatencyMarkerInfoNV.html
struct SetLatencyMarkerInfoNV
{
using NativeType = VkSetLatencyMarkerInfoNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSetLatencyMarkerInfoNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SetLatencyMarkerInfoNV(uint64_t presentID_ = {}, VULKAN_HPP_NAMESPACE::LatencyMarkerNV marker_ = VULKAN_HPP_NAMESPACE::LatencyMarkerNV::eSimulationStart, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, presentID{ presentID_ }, marker{ marker_ }
{}
VULKAN_HPP_CONSTEXPR SetLatencyMarkerInfoNV( SetLatencyMarkerInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SetLatencyMarkerInfoNV( VkSetLatencyMarkerInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: SetLatencyMarkerInfoNV( *reinterpret_cast<SetLatencyMarkerInfoNV const *>( &rhs ) )
{}
SetLatencyMarkerInfoNV & operator=( SetLatencyMarkerInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SetLatencyMarkerInfoNV & operator=( VkSetLatencyMarkerInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SetLatencyMarkerInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 SetLatencyMarkerInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SetLatencyMarkerInfoNV & setPresentID( uint64_t presentID_ ) VULKAN_HPP_NOEXCEPT
{
presentID = presentID_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SetLatencyMarkerInfoNV & setMarker( VULKAN_HPP_NAMESPACE::LatencyMarkerNV marker_ ) VULKAN_HPP_NOEXCEPT
{
marker = marker_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkSetLatencyMarkerInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSetLatencyMarkerInfoNV*>( this );
}
operator VkSetLatencyMarkerInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSetLatencyMarkerInfoNV*>( this );
}
operator VkSetLatencyMarkerInfoNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSetLatencyMarkerInfoNV*>( this );
}
operator VkSetLatencyMarkerInfoNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSetLatencyMarkerInfoNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint64_t const &, VULKAN_HPP_NAMESPACE::LatencyMarkerNV const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, presentID, marker );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SetLatencyMarkerInfoNV const & ) const = default;
#else
bool operator==( SetLatencyMarkerInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( presentID == rhs.presentID )
&& ( marker == rhs.marker );
#endif
}
bool operator!=( SetLatencyMarkerInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSetLatencyMarkerInfoNV;
const void * pNext = {};
uint64_t presentID = {};
VULKAN_HPP_NAMESPACE::LatencyMarkerNV marker = VULKAN_HPP_NAMESPACE::LatencyMarkerNV::eSimulationStart;
};
template <>
struct CppType<StructureType, StructureType::eSetLatencyMarkerInfoNV>
{
using Type = SetLatencyMarkerInfoNV;
};
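// Usage sketch (illustrative only, assuming VK_NV_low_latency2 is enabled, with a
// valid vk::Device `device`, vk::SwapchainKHR `swapchain`, and the application's
// current frame id `presentID`):
//
//   vk::SetLatencyMarkerInfoNV markerInfo( presentID, vk::LatencyMarkerNV::eSimulationStart );
//   device.setLatencyMarkerNV( swapchain, markerInfo );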
#if defined( VK_ENABLE_BETA_EXTENSIONS )
// wrapper struct for struct VkSetPresentConfigNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSetPresentConfigNV.html
struct SetPresentConfigNV
{
using NativeType = VkSetPresentConfigNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSetPresentConfigNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SetPresentConfigNV(uint32_t numFramesPerBatch_ = {}, uint32_t presentConfigFeedback_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, numFramesPerBatch{ numFramesPerBatch_ }, presentConfigFeedback{ presentConfigFeedback_ }
{}
VULKAN_HPP_CONSTEXPR SetPresentConfigNV( SetPresentConfigNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SetPresentConfigNV( VkSetPresentConfigNV const & rhs ) VULKAN_HPP_NOEXCEPT
: SetPresentConfigNV( *reinterpret_cast<SetPresentConfigNV const *>( &rhs ) )
{}
SetPresentConfigNV & operator=( SetPresentConfigNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SetPresentConfigNV & operator=( VkSetPresentConfigNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SetPresentConfigNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 SetPresentConfigNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SetPresentConfigNV & setNumFramesPerBatch( uint32_t numFramesPerBatch_ ) VULKAN_HPP_NOEXCEPT
{
numFramesPerBatch = numFramesPerBatch_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SetPresentConfigNV & setPresentConfigFeedback( uint32_t presentConfigFeedback_ ) VULKAN_HPP_NOEXCEPT
{
presentConfigFeedback = presentConfigFeedback_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkSetPresentConfigNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSetPresentConfigNV*>( this );
}
operator VkSetPresentConfigNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSetPresentConfigNV*>( this );
}
operator VkSetPresentConfigNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSetPresentConfigNV*>( this );
}
operator VkSetPresentConfigNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSetPresentConfigNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, numFramesPerBatch, presentConfigFeedback );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SetPresentConfigNV const & ) const = default;
#else
bool operator==( SetPresentConfigNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( numFramesPerBatch == rhs.numFramesPerBatch )
&& ( presentConfigFeedback == rhs.presentConfigFeedback );
#endif
}
bool operator!=( SetPresentConfigNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSetPresentConfigNV;
const void * pNext = {};
uint32_t numFramesPerBatch = {};
uint32_t presentConfigFeedback = {};
};
template <>
struct CppType<StructureType, StructureType::eSetPresentConfigNV>
{
using Type = SetPresentConfigNV;
};
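// Usage sketch (illustrative only; VK_NV_present_metering is a beta extension and
// this struct's consumption point may change, so only construction via the fluent
// setters is shown):
//
//   vk::SetPresentConfigNV presentConfig{};
//   presentConfig.setNumFramesPerBatch( 2 );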
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
// wrapper struct for struct VkSetStateFlagsIndirectCommandNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSetStateFlagsIndirectCommandNV.html
struct SetStateFlagsIndirectCommandNV
{
using NativeType = VkSetStateFlagsIndirectCommandNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SetStateFlagsIndirectCommandNV(uint32_t data_ = {}) VULKAN_HPP_NOEXCEPT
: data{ data_ }
{}
VULKAN_HPP_CONSTEXPR SetStateFlagsIndirectCommandNV( SetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SetStateFlagsIndirectCommandNV( VkSetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
: SetStateFlagsIndirectCommandNV( *reinterpret_cast<SetStateFlagsIndirectCommandNV const *>( &rhs ) )
{}
SetStateFlagsIndirectCommandNV & operator=( SetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SetStateFlagsIndirectCommandNV & operator=( VkSetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SetStateFlagsIndirectCommandNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 SetStateFlagsIndirectCommandNV & setData( uint32_t data_ ) VULKAN_HPP_NOEXCEPT
{
data = data_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkSetStateFlagsIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSetStateFlagsIndirectCommandNV*>( this );
}
operator VkSetStateFlagsIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSetStateFlagsIndirectCommandNV*>( this );
}
operator VkSetStateFlagsIndirectCommandNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSetStateFlagsIndirectCommandNV*>( this );
}
operator VkSetStateFlagsIndirectCommandNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSetStateFlagsIndirectCommandNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( data );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SetStateFlagsIndirectCommandNV const & ) const = default;
#else
bool operator==( SetStateFlagsIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( data == rhs.data );
#endif
}
bool operator!=( SetStateFlagsIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
uint32_t data = {};
};
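// Usage sketch (illustrative only): unlike the *Info structs above, this token is
// written into a VK_NV_device_generated_commands stream buffer on the host rather
// than passed to an API entry point. `mappedStream` and `tokenOffset` are
// hypothetical names for a mapped stream buffer and the token's byte offset.
//
//   vk::SetStateFlagsIndirectCommandNV stateFlags( /*data=*/1 );  // encodes the state bits consumed by the token
//   std::memcpy( mappedStream + tokenOffset, &stateFlags, sizeof( stateFlags ) );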
// wrapper struct for struct VkShaderCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkShaderCreateInfoEXT.html
struct ShaderCreateInfoEXT
{
using NativeType = VkShaderCreateInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eShaderCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ShaderCreateInfoEXT(VULKAN_HPP_NAMESPACE::ShaderCreateFlagsEXT flags_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex, VULKAN_HPP_NAMESPACE::ShaderStageFlags nextStage_ = {}, VULKAN_HPP_NAMESPACE::ShaderCodeTypeEXT codeType_ = VULKAN_HPP_NAMESPACE::ShaderCodeTypeEXT::eBinary, size_t codeSize_ = {}, const void * pCode_ = {}, const char * pName_ = {}, uint32_t setLayoutCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ = {}, uint32_t pushConstantRangeCount_ = {}, const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges_ = {}, const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, stage{ stage_ }, nextStage{ nextStage_ }, codeType{ codeType_ }, codeSize{ codeSize_ }, pCode{ pCode_ }, pName{ pName_ }, setLayoutCount{ setLayoutCount_ }, pSetLayouts{ pSetLayouts_ }, pushConstantRangeCount{ pushConstantRangeCount_ }, pPushConstantRanges{ pPushConstantRanges_ }, pSpecializationInfo{ pSpecializationInfo_ }
{}
VULKAN_HPP_CONSTEXPR ShaderCreateInfoEXT( ShaderCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ShaderCreateInfoEXT( VkShaderCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: ShaderCreateInfoEXT( *reinterpret_cast<ShaderCreateInfoEXT const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
template <typename T>
ShaderCreateInfoEXT( VULKAN_HPP_NAMESPACE::ShaderCreateFlagsEXT flags_, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_, VULKAN_HPP_NAMESPACE::ShaderStageFlags nextStage_, VULKAN_HPP_NAMESPACE::ShaderCodeTypeEXT codeType_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & code_, const char * pName_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayout> const & setLayouts_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PushConstantRange> const & pushConstantRanges_ = {}, const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo_ = {}, const void * pNext_ = nullptr )
: pNext( pNext_ ), flags( flags_ ), stage( stage_ ), nextStage( nextStage_ ), codeType( codeType_ ), codeSize( code_.size() * sizeof(T) ), pCode( code_.data() ), pName( pName_ ), setLayoutCount( static_cast<uint32_t>( setLayouts_.size() ) ), pSetLayouts( setLayouts_.data() ), pushConstantRangeCount( static_cast<uint32_t>( pushConstantRanges_.size() ) ), pPushConstantRanges( pushConstantRanges_.data() ), pSpecializationInfo( pSpecializationInfo_ )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
ShaderCreateInfoEXT & operator=( ShaderCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ShaderCreateInfoEXT & operator=( VkShaderCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::ShaderCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setStage( VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ ) VULKAN_HPP_NOEXCEPT
{
stage = stage_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setNextStage( VULKAN_HPP_NAMESPACE::ShaderStageFlags nextStage_ ) VULKAN_HPP_NOEXCEPT
{
nextStage = nextStage_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setCodeType( VULKAN_HPP_NAMESPACE::ShaderCodeTypeEXT codeType_ ) VULKAN_HPP_NOEXCEPT
{
codeType = codeType_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setCodeSize( size_t codeSize_ ) VULKAN_HPP_NOEXCEPT
{
codeSize = codeSize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setPCode( const void * pCode_ ) VULKAN_HPP_NOEXCEPT
{
pCode = pCode_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
template <typename T>
ShaderCreateInfoEXT & setCode( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & code_ ) VULKAN_HPP_NOEXCEPT
{
codeSize = code_.size() * sizeof(T);
pCode = code_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setPName( const char * pName_ ) VULKAN_HPP_NOEXCEPT
{
pName = pName_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setSetLayoutCount( uint32_t setLayoutCount_ ) VULKAN_HPP_NOEXCEPT
{
setLayoutCount = setLayoutCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ ) VULKAN_HPP_NOEXCEPT
{
pSetLayouts = pSetLayouts_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
ShaderCreateInfoEXT & setSetLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayout> const & setLayouts_ ) VULKAN_HPP_NOEXCEPT
{
setLayoutCount = static_cast<uint32_t>( setLayouts_.size() );
pSetLayouts = setLayouts_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setPushConstantRangeCount( uint32_t pushConstantRangeCount_ ) VULKAN_HPP_NOEXCEPT
{
pushConstantRangeCount = pushConstantRangeCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setPPushConstantRanges( const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges_ ) VULKAN_HPP_NOEXCEPT
{
pPushConstantRanges = pPushConstantRanges_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
ShaderCreateInfoEXT & setPushConstantRanges( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PushConstantRange> const & pushConstantRanges_ ) VULKAN_HPP_NOEXCEPT
{
pushConstantRangeCount = static_cast<uint32_t>( pushConstantRanges_.size() );
pPushConstantRanges = pushConstantRanges_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setPSpecializationInfo( const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo_ ) VULKAN_HPP_NOEXCEPT
{
pSpecializationInfo = pSpecializationInfo_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkShaderCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkShaderCreateInfoEXT*>( this );
}
operator VkShaderCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkShaderCreateInfoEXT*>( this );
}
operator VkShaderCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkShaderCreateInfoEXT*>( this );
}
operator VkShaderCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkShaderCreateInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ShaderCreateFlagsEXT const &, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits const &, VULKAN_HPP_NAMESPACE::ShaderStageFlags const &, VULKAN_HPP_NAMESPACE::ShaderCodeTypeEXT const &, size_t const &, const void * const &, const char * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::PushConstantRange * const &, const VULKAN_HPP_NAMESPACE::SpecializationInfo * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, stage, nextStage, codeType, codeSize, pCode, pName, setLayoutCount, pSetLayouts, pushConstantRangeCount, pPushConstantRanges, pSpecializationInfo );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
std::strong_ordering operator<=>( ShaderCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) return cmp;
if ( auto cmp = stage <=> rhs.stage; cmp != 0 ) return cmp;
if ( auto cmp = nextStage <=> rhs.nextStage; cmp != 0 ) return cmp;
if ( auto cmp = codeType <=> rhs.codeType; cmp != 0 ) return cmp;
if ( auto cmp = codeSize <=> rhs.codeSize; cmp != 0 ) return cmp;
if ( auto cmp = pCode <=> rhs.pCode; cmp != 0 ) return cmp;
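// compare the entry-point names lexicographically only when the pointers themselves differ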
if ( pName != rhs.pName )
if ( auto cmp = strcmp( pName, rhs.pName ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
if ( auto cmp = setLayoutCount <=> rhs.setLayoutCount; cmp != 0 ) return cmp;
if ( auto cmp = pSetLayouts <=> rhs.pSetLayouts; cmp != 0 ) return cmp;
if ( auto cmp = pushConstantRangeCount <=> rhs.pushConstantRangeCount; cmp != 0 ) return cmp;
if ( auto cmp = pPushConstantRanges <=> rhs.pPushConstantRanges; cmp != 0 ) return cmp;
if ( auto cmp = pSpecializationInfo <=> rhs.pSpecializationInfo; cmp != 0 ) return cmp;
return std::strong_ordering::equivalent;
}
#endif
bool operator==( ShaderCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( stage == rhs.stage )
&& ( nextStage == rhs.nextStage )
&& ( codeType == rhs.codeType )
&& ( codeSize == rhs.codeSize )
&& ( pCode == rhs.pCode )
&& ( ( pName == rhs.pName ) || ( strcmp( pName, rhs.pName ) == 0 ) )
&& ( setLayoutCount == rhs.setLayoutCount )
&& ( pSetLayouts == rhs.pSetLayouts )
&& ( pushConstantRangeCount == rhs.pushConstantRangeCount )
&& ( pPushConstantRanges == rhs.pPushConstantRanges )
&& ( pSpecializationInfo == rhs.pSpecializationInfo );
}
bool operator!=( ShaderCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eShaderCreateInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ShaderCreateFlagsEXT flags = {};
VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex;
VULKAN_HPP_NAMESPACE::ShaderStageFlags nextStage = {};
VULKAN_HPP_NAMESPACE::ShaderCodeTypeEXT codeType = VULKAN_HPP_NAMESPACE::ShaderCodeTypeEXT::eBinary;
size_t codeSize = {};
const void * pCode = {};
const char * pName = {};
uint32_t setLayoutCount = {};
const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts = {};
uint32_t pushConstantRangeCount = {};
const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges = {};
const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo = {};
};
template <>
struct CppType<StructureType, StructureType::eShaderCreateInfoEXT>
{
using Type = ShaderCreateInfoEXT;
};
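// Usage sketch (illustrative only, assuming VK_EXT_shader_object is enabled and
// exceptions-enabled enhanced mode; the exact return shape varies with configuration):
//
//   std::vector<uint32_t> spirv = /* compiled SPIR-V words */;
//   vk::ShaderCreateInfoEXT shaderInfo(
//     {}, vk::ShaderStageFlagBits::eCompute, {}, vk::ShaderCodeTypeEXT::eSpirv,
//     spirv, "main" );  // codeSize is derived in bytes: code_.size() * sizeof(T)
//   auto [result, shaders] = device.createShadersEXT( shaderInfo );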
// wrapper struct for struct VkShaderModuleCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkShaderModuleCreateInfo.html
struct ShaderModuleCreateInfo
{
using NativeType = VkShaderModuleCreateInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eShaderModuleCreateInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ShaderModuleCreateInfo(VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_ = {}, size_t codeSize_ = {}, const uint32_t * pCode_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, codeSize{ codeSize_ }, pCode{ pCode_ }
{}
VULKAN_HPP_CONSTEXPR ShaderModuleCreateInfo( ShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ShaderModuleCreateInfo( VkShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: ShaderModuleCreateInfo( *reinterpret_cast<ShaderModuleCreateInfo const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
ShaderModuleCreateInfo( VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & code_, const void * pNext_ = nullptr )
: pNext( pNext_ ), flags( flags_ ), codeSize( code_.size() * 4 ), pCode( code_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
ShaderModuleCreateInfo & operator=( ShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ShaderModuleCreateInfo & operator=( VkShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ShaderModuleCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ShaderModuleCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ShaderModuleCreateInfo & setCodeSize( size_t codeSize_ ) VULKAN_HPP_NOEXCEPT
{
codeSize = codeSize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ShaderModuleCreateInfo & setPCode( const uint32_t * pCode_ ) VULKAN_HPP_NOEXCEPT
{
pCode = pCode_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
ShaderModuleCreateInfo & setCode( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & code_ ) VULKAN_HPP_NOEXCEPT
{
codeSize = code_.size() * 4;
pCode = code_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkShaderModuleCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkShaderModuleCreateInfo*>( this );
}
operator VkShaderModuleCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkShaderModuleCreateInfo*>( this );
}
operator VkShaderModuleCreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkShaderModuleCreateInfo*>( this );
}
operator VkShaderModuleCreateInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkShaderModuleCreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags const &, size_t const &, const uint32_t * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, codeSize, pCode );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ShaderModuleCreateInfo const & ) const = default;
#else
bool operator==( ShaderModuleCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( codeSize == rhs.codeSize )
&& ( pCode == rhs.pCode );
#endif
}
bool operator!=( ShaderModuleCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eShaderModuleCreateInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags = {};
size_t codeSize = {};
const uint32_t * pCode = {};
};
template <>
struct CppType<StructureType, StructureType::eShaderModuleCreateInfo>
{
using Type = ShaderModuleCreateInfo;
};
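// Usage sketch (illustrative only): the enhanced-mode constructor above derives
// codeSize in bytes from a span of uint32_t SPIR-V words, hence the factor of 4.
// Assumes a valid vk::Device `device` and compiled SPIR-V words in `spirv`.
//
//   std::vector<uint32_t> spirv = /* compiled SPIR-V words */;
//   vk::ShaderModuleCreateInfo createInfo( {}, spirv );
//   vk::ShaderModule shaderModule = device.createShaderModule( createInfo );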
// wrapper struct for struct VkShaderModuleIdentifierEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkShaderModuleIdentifierEXT.html
struct ShaderModuleIdentifierEXT
{
using NativeType = VkShaderModuleIdentifierEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eShaderModuleIdentifierEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 ShaderModuleIdentifierEXT(uint32_t identifierSize_ = {}, std::array<uint8_t,VK_MAX_SHADER_MODULE_IDENTIFIER_SIZE_EXT> const & identifier_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, identifierSize{ identifierSize_ }, identifier{ identifier_ }
{}
VULKAN_HPP_CONSTEXPR_14 ShaderModuleIdentifierEXT( ShaderModuleIdentifierEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ShaderModuleIdentifierEXT( VkShaderModuleIdentifierEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: ShaderModuleIdentifierEXT( *reinterpret_cast<ShaderModuleIdentifierEXT const *>( &rhs ) )
{}
ShaderModuleIdentifierEXT & operator=( ShaderModuleIdentifierEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ShaderModuleIdentifierEXT & operator=( VkShaderModuleIdentifierEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT const *>( &rhs );
return *this;
}
operator VkShaderModuleIdentifierEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkShaderModuleIdentifierEXT*>( this );
}
operator VkShaderModuleIdentifierEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkShaderModuleIdentifierEXT*>( this );
}
operator VkShaderModuleIdentifierEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkShaderModuleIdentifierEXT*>( this );
}
operator VkShaderModuleIdentifierEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkShaderModuleIdentifierEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_MAX_SHADER_MODULE_IDENTIFIER_SIZE_EXT> const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, identifierSize, identifier );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
std::strong_ordering operator<=>( ShaderModuleIdentifierEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
if ( auto cmp = identifierSize <=> rhs.identifierSize; cmp != 0 ) return cmp;
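// only the first identifierSize bytes of the identifier array are meaningful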
for ( size_t i = 0; i < identifierSize; ++i )
{
if ( auto cmp = identifier[i] <=> rhs.identifier[i]; cmp != 0 ) return cmp;
}
return std::strong_ordering::equivalent;
}
#endif
bool operator==( ShaderModuleIdentifierEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( identifierSize == rhs.identifierSize )
&& ( memcmp( identifier, rhs.identifier, identifierSize * sizeof( uint8_t ) ) == 0 );
}
bool operator!=( ShaderModuleIdentifierEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eShaderModuleIdentifierEXT;
void * pNext = {};
uint32_t identifierSize = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_MAX_SHADER_MODULE_IDENTIFIER_SIZE_EXT> identifier = {};
};
template <>
struct CppType<StructureType, StructureType::eShaderModuleIdentifierEXT>
{
using Type = ShaderModuleIdentifierEXT;
};
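// Usage sketch (illustrative only, assuming VK_EXT_shader_module_identifier is
// enabled, with a valid vk::Device `device` and vk::ShaderModule `shaderModule`):
//
//   vk::ShaderModuleIdentifierEXT id = device.getShaderModuleIdentifierEXT( shaderModule );
//   // persist only the first id.identifierSize bytes of id.identifier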
// wrapper struct for struct VkShaderModuleValidationCacheCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkShaderModuleValidationCacheCreateInfoEXT.html
struct ShaderModuleValidationCacheCreateInfoEXT
{
using NativeType = VkShaderModuleValidationCacheCreateInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eShaderModuleValidationCacheCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ShaderModuleValidationCacheCreateInfoEXT(VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, validationCache{ validationCache_ }
{}
VULKAN_HPP_CONSTEXPR ShaderModuleValidationCacheCreateInfoEXT( ShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ShaderModuleValidationCacheCreateInfoEXT( VkShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: ShaderModuleValidationCacheCreateInfoEXT( *reinterpret_cast<ShaderModuleValidationCacheCreateInfoEXT const *>( &rhs ) )
{}
ShaderModuleValidationCacheCreateInfoEXT & operator=( ShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ShaderModuleValidationCacheCreateInfoEXT & operator=( VkShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShaderModuleValidationCacheCreateInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ShaderModuleValidationCacheCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ShaderModuleValidationCacheCreateInfoEXT & setValidationCache( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache_ ) VULKAN_HPP_NOEXCEPT
{
validationCache = validationCache_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkShaderModuleValidationCacheCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkShaderModuleValidationCacheCreateInfoEXT*>( this );
}
operator VkShaderModuleValidationCacheCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkShaderModuleValidationCacheCreateInfoEXT*>( this );
}
operator VkShaderModuleValidationCacheCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkShaderModuleValidationCacheCreateInfoEXT*>( this );
}
operator VkShaderModuleValidationCacheCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkShaderModuleValidationCacheCreateInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ValidationCacheEXT const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, validationCache );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ShaderModuleValidationCacheCreateInfoEXT const & ) const = default;
#else
bool operator==( ShaderModuleValidationCacheCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( validationCache == rhs.validationCache );
#endif
}
bool operator!=( ShaderModuleValidationCacheCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eShaderModuleValidationCacheCreateInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache = {};
};
template <>
struct CppType<StructureType, StructureType::eShaderModuleValidationCacheCreateInfoEXT>
{
using Type = ShaderModuleValidationCacheCreateInfoEXT;
};
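// Usage sketch (illustrative only, assuming VK_EXT_validation_cache is enabled and
// a vk::ValidationCacheEXT `validationCache` created earlier): chain this struct
// into vk::ShaderModuleCreateInfo::pNext so module validation results are cached.
//
//   vk::ShaderModuleValidationCacheCreateInfoEXT cacheInfo( validationCache );
//   vk::ShaderModuleCreateInfo createInfo( {}, spirv, &cacheInfo );
//   vk::ShaderModule shaderModule = device.createShaderModule( createInfo );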
// wrapper struct for struct VkShaderResourceUsageAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkShaderResourceUsageAMD.html
struct ShaderResourceUsageAMD
{
using NativeType = VkShaderResourceUsageAMD;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ShaderResourceUsageAMD(uint32_t numUsedVgprs_ = {}, uint32_t numUsedSgprs_ = {}, uint32_t ldsSizePerLocalWorkGroup_ = {}, size_t ldsUsageSizeInBytes_ = {}, size_t scratchMemUsageInBytes_ = {}) VULKAN_HPP_NOEXCEPT
: numUsedVgprs{ numUsedVgprs_ }, numUsedSgprs{ numUsedSgprs_ }, ldsSizePerLocalWorkGroup{ ldsSizePerLocalWorkGroup_ }, ldsUsageSizeInBytes{ ldsUsageSizeInBytes_ }, scratchMemUsageInBytes{ scratchMemUsageInBytes_ }
{}
VULKAN_HPP_CONSTEXPR ShaderResourceUsageAMD( ShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ShaderResourceUsageAMD( VkShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT
: ShaderResourceUsageAMD( *reinterpret_cast<ShaderResourceUsageAMD const *>( &rhs ) )
{}
ShaderResourceUsageAMD & operator=( ShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ShaderResourceUsageAMD & operator=( VkShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD const *>( &rhs );
return *this;
}
operator VkShaderResourceUsageAMD const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkShaderResourceUsageAMD*>( this );
}
operator VkShaderResourceUsageAMD &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkShaderResourceUsageAMD*>( this );
}
operator VkShaderResourceUsageAMD const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkShaderResourceUsageAMD*>( this );
}
operator VkShaderResourceUsageAMD *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkShaderResourceUsageAMD*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<uint32_t const &, uint32_t const &, uint32_t const &, size_t const &, size_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( numUsedVgprs, numUsedSgprs, ldsSizePerLocalWorkGroup, ldsUsageSizeInBytes, scratchMemUsageInBytes );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ShaderResourceUsageAMD const & ) const = default;
#else
bool operator==( ShaderResourceUsageAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( numUsedVgprs == rhs.numUsedVgprs )
&& ( numUsedSgprs == rhs.numUsedSgprs )
&& ( ldsSizePerLocalWorkGroup == rhs.ldsSizePerLocalWorkGroup )
&& ( ldsUsageSizeInBytes == rhs.ldsUsageSizeInBytes )
&& ( scratchMemUsageInBytes == rhs.scratchMemUsageInBytes );
#endif
}
bool operator!=( ShaderResourceUsageAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
uint32_t numUsedVgprs = {};
uint32_t numUsedSgprs = {};
uint32_t ldsSizePerLocalWorkGroup = {};
size_t ldsUsageSizeInBytes = {};
size_t scratchMemUsageInBytes = {};
};
// wrapper struct for struct VkShaderStatisticsInfoAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkShaderStatisticsInfoAMD.html
struct ShaderStatisticsInfoAMD
{
using NativeType = VkShaderStatisticsInfoAMD;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 ShaderStatisticsInfoAMD(VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStageMask_ = {}, VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD resourceUsage_ = {}, uint32_t numPhysicalVgprs_ = {}, uint32_t numPhysicalSgprs_ = {}, uint32_t numAvailableVgprs_ = {}, uint32_t numAvailableSgprs_ = {}, std::array<uint32_t,3> const & computeWorkGroupSize_ = {}) VULKAN_HPP_NOEXCEPT
: shaderStageMask{ shaderStageMask_ }, resourceUsage{ resourceUsage_ }, numPhysicalVgprs{ numPhysicalVgprs_ }, numPhysicalSgprs{ numPhysicalSgprs_ }, numAvailableVgprs{ numAvailableVgprs_ }, numAvailableSgprs{ numAvailableSgprs_ }, computeWorkGroupSize{ computeWorkGroupSize_ }
{}
VULKAN_HPP_CONSTEXPR_14 ShaderStatisticsInfoAMD( ShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ShaderStatisticsInfoAMD( VkShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
: ShaderStatisticsInfoAMD( *reinterpret_cast<ShaderStatisticsInfoAMD const *>( &rhs ) )
{}
ShaderStatisticsInfoAMD & operator=( ShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ShaderStatisticsInfoAMD & operator=( VkShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShaderStatisticsInfoAMD const *>( &rhs );
return *this;
}
operator VkShaderStatisticsInfoAMD const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkShaderStatisticsInfoAMD*>( this );
}
operator VkShaderStatisticsInfoAMD &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkShaderStatisticsInfoAMD*>( this );
}
operator VkShaderStatisticsInfoAMD const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkShaderStatisticsInfoAMD*>( this );
}
operator VkShaderStatisticsInfoAMD *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkShaderStatisticsInfoAMD*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::ShaderStageFlags const &, VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( shaderStageMask, resourceUsage, numPhysicalVgprs, numPhysicalSgprs, numAvailableVgprs, numAvailableSgprs, computeWorkGroupSize );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ShaderStatisticsInfoAMD const & ) const = default;
#else
bool operator==( ShaderStatisticsInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( shaderStageMask == rhs.shaderStageMask )
&& ( resourceUsage == rhs.resourceUsage )
&& ( numPhysicalVgprs == rhs.numPhysicalVgprs )
&& ( numPhysicalSgprs == rhs.numPhysicalSgprs )
&& ( numAvailableVgprs == rhs.numAvailableVgprs )
&& ( numAvailableSgprs == rhs.numAvailableSgprs )
&& ( computeWorkGroupSize == rhs.computeWorkGroupSize );
#endif
}
bool operator!=( ShaderStatisticsInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStageMask = {};
VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD resourceUsage = {};
uint32_t numPhysicalVgprs = {};
uint32_t numPhysicalSgprs = {};
uint32_t numAvailableVgprs = {};
uint32_t numAvailableSgprs = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> computeWorkGroupSize = {};
};
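// Usage sketch (illustrative): per VK_AMD_shader_info, these statistics are returned as a raw
// byte blob by Device::getShaderInfoAMD and can be copied into a ShaderStatisticsInfoAMD, whose
// resourceUsage member is the nested ShaderResourceUsageAMD above; `device` and `pipeline` are
// hypothetical.
//
//   std::vector<uint8_t> blob = device.getShaderInfoAMD(
//     pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex,
//     VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD::eStatistics );
//   VULKAN_HPP_NAMESPACE::ShaderStatisticsInfoAMD statistics = {};
//   VULKAN_HPP_ASSERT( blob.size() >= sizeof( statistics ) );
//   memcpy( &statistics, blob.data(), sizeof( statistics ) );
//   uint32_t usedVgprs = statistics.resourceUsage.numUsedVgprs;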
// wrapper struct for struct VkSharedPresentSurfaceCapabilitiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSharedPresentSurfaceCapabilitiesKHR.html
struct SharedPresentSurfaceCapabilitiesKHR
{
using NativeType = VkSharedPresentSurfaceCapabilitiesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSharedPresentSurfaceCapabilitiesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SharedPresentSurfaceCapabilitiesKHR(VULKAN_HPP_NAMESPACE::ImageUsageFlags sharedPresentSupportedUsageFlags_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, sharedPresentSupportedUsageFlags{ sharedPresentSupportedUsageFlags_ }
{}
VULKAN_HPP_CONSTEXPR SharedPresentSurfaceCapabilitiesKHR( SharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SharedPresentSurfaceCapabilitiesKHR( VkSharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: SharedPresentSurfaceCapabilitiesKHR( *reinterpret_cast<SharedPresentSurfaceCapabilitiesKHR const *>( &rhs ) )
{}
SharedPresentSurfaceCapabilitiesKHR & operator=( SharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SharedPresentSurfaceCapabilitiesKHR & operator=( VkSharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR const *>( &rhs );
return *this;
}
operator VkSharedPresentSurfaceCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSharedPresentSurfaceCapabilitiesKHR*>( this );
}
operator VkSharedPresentSurfaceCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSharedPresentSurfaceCapabilitiesKHR*>( this );
}
operator VkSharedPresentSurfaceCapabilitiesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSharedPresentSurfaceCapabilitiesKHR*>( this );
}
operator VkSharedPresentSurfaceCapabilitiesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSharedPresentSurfaceCapabilitiesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ImageUsageFlags const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, sharedPresentSupportedUsageFlags );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SharedPresentSurfaceCapabilitiesKHR const & ) const = default;
#else
bool operator==( SharedPresentSurfaceCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( sharedPresentSupportedUsageFlags == rhs.sharedPresentSupportedUsageFlags );
#endif
}
bool operator!=( SharedPresentSurfaceCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSharedPresentSurfaceCapabilitiesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::ImageUsageFlags sharedPresentSupportedUsageFlags = {};
};
template <>
struct CppType<StructureType, StructureType::eSharedPresentSurfaceCapabilitiesKHR>
{
using Type = SharedPresentSurfaceCapabilitiesKHR;
};
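// Usage sketch (illustrative): as a read-only output structure for VK_KHR_shared_presentable_image,
// this is typically retrieved through a StructureChain query; `physicalDevice` and `surfaceInfo`
// (a PhysicalDeviceSurfaceInfo2KHR) are hypothetical.
//
//   auto chain = physicalDevice.getSurfaceCapabilities2KHR<
//     VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR,
//     VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR>( surfaceInfo );
//   VULKAN_HPP_NAMESPACE::ImageUsageFlags sharedUsage =
//     chain.get<VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR>().sharedPresentSupportedUsageFlags;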
// wrapper struct for struct VkSparseImageFormatProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSparseImageFormatProperties.html
struct SparseImageFormatProperties
{
using NativeType = VkSparseImageFormatProperties;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SparseImageFormatProperties(VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, VULKAN_HPP_NAMESPACE::Extent3D imageGranularity_ = {}, VULKAN_HPP_NAMESPACE::SparseImageFormatFlags flags_ = {}) VULKAN_HPP_NOEXCEPT
: aspectMask{ aspectMask_ }, imageGranularity{ imageGranularity_ }, flags{ flags_ }
{}
VULKAN_HPP_CONSTEXPR SparseImageFormatProperties( SparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SparseImageFormatProperties( VkSparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
: SparseImageFormatProperties( *reinterpret_cast<SparseImageFormatProperties const *>( &rhs ) )
{}
SparseImageFormatProperties & operator=( SparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SparseImageFormatProperties & operator=( VkSparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties const *>( &rhs );
return *this;
}
operator VkSparseImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSparseImageFormatProperties*>( this );
}
operator VkSparseImageFormatProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSparseImageFormatProperties*>( this );
}
operator VkSparseImageFormatProperties const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSparseImageFormatProperties*>( this );
}
operator VkSparseImageFormatProperties *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSparseImageFormatProperties*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::ImageAspectFlags const &, VULKAN_HPP_NAMESPACE::Extent3D const &, VULKAN_HPP_NAMESPACE::SparseImageFormatFlags const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( aspectMask, imageGranularity, flags );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SparseImageFormatProperties const & ) const = default;
#else
bool operator==( SparseImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( aspectMask == rhs.aspectMask )
&& ( imageGranularity == rhs.imageGranularity )
&& ( flags == rhs.flags );
#endif
}
bool operator!=( SparseImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
VULKAN_HPP_NAMESPACE::Extent3D imageGranularity = {};
VULKAN_HPP_NAMESPACE::SparseImageFormatFlags flags = {};
};
// wrapper struct for struct VkSparseImageFormatProperties2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSparseImageFormatProperties2.html
struct SparseImageFormatProperties2
{
using NativeType = VkSparseImageFormatProperties2;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSparseImageFormatProperties2;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SparseImageFormatProperties2(VULKAN_HPP_NAMESPACE::SparseImageFormatProperties properties_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, properties{ properties_ }
{}
VULKAN_HPP_CONSTEXPR SparseImageFormatProperties2( SparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SparseImageFormatProperties2( VkSparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
: SparseImageFormatProperties2( *reinterpret_cast<SparseImageFormatProperties2 const *>( &rhs ) )
{}
SparseImageFormatProperties2 & operator=( SparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SparseImageFormatProperties2 & operator=( VkSparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 const *>( &rhs );
return *this;
}
operator VkSparseImageFormatProperties2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSparseImageFormatProperties2*>( this );
}
operator VkSparseImageFormatProperties2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSparseImageFormatProperties2*>( this );
}
operator VkSparseImageFormatProperties2 const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSparseImageFormatProperties2*>( this );
}
operator VkSparseImageFormatProperties2 *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSparseImageFormatProperties2*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, properties );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SparseImageFormatProperties2 const & ) const = default;
#else
bool operator==( SparseImageFormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( properties == rhs.properties );
#endif
}
bool operator!=( SparseImageFormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSparseImageFormatProperties2;
void * pNext = {};
VULKAN_HPP_NAMESPACE::SparseImageFormatProperties properties = {};
};
template <>
struct CppType<StructureType, StructureType::eSparseImageFormatProperties2>
{
using Type = SparseImageFormatProperties2;
};
using SparseImageFormatProperties2KHR = SparseImageFormatProperties2;
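// Usage sketch (illustrative): the extensible query returns one SparseImageFormatProperties2 per
// supported image aspect; `physicalDevice` and `formatInfo` (a PhysicalDeviceSparseImageFormatInfo2)
// are hypothetical.
//
//   std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2> props =
//     physicalDevice.getSparseImageFormatProperties2( formatInfo );
//   for ( auto const & p : props )
//   {
//     VULKAN_HPP_NAMESPACE::Extent3D granularity = p.properties.imageGranularity;
//   }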
// wrapper struct for struct VkSparseImageMemoryRequirements, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSparseImageMemoryRequirements.html
struct SparseImageMemoryRequirements
{
using NativeType = VkSparseImageMemoryRequirements;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements(VULKAN_HPP_NAMESPACE::SparseImageFormatProperties formatProperties_ = {}, uint32_t imageMipTailFirstLod_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailSize_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailStride_ = {}) VULKAN_HPP_NOEXCEPT
: formatProperties{ formatProperties_ }, imageMipTailFirstLod{ imageMipTailFirstLod_ }, imageMipTailSize{ imageMipTailSize_ }, imageMipTailOffset{ imageMipTailOffset_ }, imageMipTailStride{ imageMipTailStride_ }
{}
VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements( SparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SparseImageMemoryRequirements( VkSparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
: SparseImageMemoryRequirements( *reinterpret_cast<SparseImageMemoryRequirements const *>( &rhs ) )
{}
SparseImageMemoryRequirements & operator=( SparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SparseImageMemoryRequirements & operator=( VkSparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements const *>( &rhs );
return *this;
}
operator VkSparseImageMemoryRequirements const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSparseImageMemoryRequirements*>( this );
}
operator VkSparseImageMemoryRequirements &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSparseImageMemoryRequirements*>( this );
}
operator VkSparseImageMemoryRequirements const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSparseImageMemoryRequirements*>( this );
}
operator VkSparseImageMemoryRequirements *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSparseImageMemoryRequirements*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( formatProperties, imageMipTailFirstLod, imageMipTailSize, imageMipTailOffset, imageMipTailStride );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SparseImageMemoryRequirements const & ) const = default;
#else
bool operator==( SparseImageMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( formatProperties == rhs.formatProperties )
&& ( imageMipTailFirstLod == rhs.imageMipTailFirstLod )
&& ( imageMipTailSize == rhs.imageMipTailSize )
&& ( imageMipTailOffset == rhs.imageMipTailOffset )
&& ( imageMipTailStride == rhs.imageMipTailStride );
#endif
}
bool operator!=( SparseImageMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::SparseImageFormatProperties formatProperties = {};
uint32_t imageMipTailFirstLod = {};
VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailSize = {};
VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailOffset = {};
VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailStride = {};
};
// wrapper struct for struct VkSparseImageMemoryRequirements2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSparseImageMemoryRequirements2.html
struct SparseImageMemoryRequirements2
{
using NativeType = VkSparseImageMemoryRequirements2;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSparseImageMemoryRequirements2;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements2(VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements memoryRequirements_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, memoryRequirements{ memoryRequirements_ }
{}
VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements2( SparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SparseImageMemoryRequirements2( VkSparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
: SparseImageMemoryRequirements2( *reinterpret_cast<SparseImageMemoryRequirements2 const *>( &rhs ) )
{}
SparseImageMemoryRequirements2 & operator=( SparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SparseImageMemoryRequirements2 & operator=( VkSparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 const *>( &rhs );
return *this;
}
operator VkSparseImageMemoryRequirements2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSparseImageMemoryRequirements2*>( this );
}
operator VkSparseImageMemoryRequirements2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSparseImageMemoryRequirements2*>( this );
}
operator VkSparseImageMemoryRequirements2 const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSparseImageMemoryRequirements2*>( this );
}
operator VkSparseImageMemoryRequirements2 *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSparseImageMemoryRequirements2*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, memoryRequirements );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SparseImageMemoryRequirements2 const & ) const = default;
#else
bool operator==( SparseImageMemoryRequirements2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( memoryRequirements == rhs.memoryRequirements );
#endif
}
bool operator!=( SparseImageMemoryRequirements2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSparseImageMemoryRequirements2;
void * pNext = {};
VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements memoryRequirements = {};
};
template <>
struct CppType<StructureType, StructureType::eSparseImageMemoryRequirements2>
{
using Type = SparseImageMemoryRequirements2;
};
using SparseImageMemoryRequirements2KHR = SparseImageMemoryRequirements2;
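// Usage sketch (illustrative): querying the mip-tail layout of a sparse image via the extensible
// *2 entry point; `device` and `info` (an ImageSparseMemoryRequirementsInfo2) are hypothetical.
//
//   std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2> reqs =
//     device.getImageSparseMemoryRequirements2( info );
//   for ( auto const & r : reqs )
//   {
//     VULKAN_HPP_NAMESPACE::DeviceSize mipTailSize = r.memoryRequirements.imageMipTailSize;
//   }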
#if defined( VK_USE_PLATFORM_GGP )
// wrapper struct for struct VkStreamDescriptorSurfaceCreateInfoGGP, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkStreamDescriptorSurfaceCreateInfoGGP.html
struct StreamDescriptorSurfaceCreateInfoGGP
{
using NativeType = VkStreamDescriptorSurfaceCreateInfoGGP;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eStreamDescriptorSurfaceCreateInfoGGP;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR StreamDescriptorSurfaceCreateInfoGGP(VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags_ = {}, GgpStreamDescriptor streamDescriptor_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, streamDescriptor{ streamDescriptor_ }
{}
VULKAN_HPP_CONSTEXPR StreamDescriptorSurfaceCreateInfoGGP( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT = default;
StreamDescriptorSurfaceCreateInfoGGP( VkStreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT
: StreamDescriptorSurfaceCreateInfoGGP( *reinterpret_cast<StreamDescriptorSurfaceCreateInfoGGP const *>( &rhs ) )
{}
StreamDescriptorSurfaceCreateInfoGGP & operator=( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
StreamDescriptorSurfaceCreateInfoGGP & operator=( VkStreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 StreamDescriptorSurfaceCreateInfoGGP & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 StreamDescriptorSurfaceCreateInfoGGP & setFlags( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 StreamDescriptorSurfaceCreateInfoGGP & setStreamDescriptor( GgpStreamDescriptor streamDescriptor_ ) VULKAN_HPP_NOEXCEPT
{
streamDescriptor = streamDescriptor_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkStreamDescriptorSurfaceCreateInfoGGP const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP*>( this );
}
operator VkStreamDescriptorSurfaceCreateInfoGGP &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkStreamDescriptorSurfaceCreateInfoGGP*>( this );
}
operator VkStreamDescriptorSurfaceCreateInfoGGP const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP*>( this );
}
operator VkStreamDescriptorSurfaceCreateInfoGGP *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkStreamDescriptorSurfaceCreateInfoGGP*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP const &, GgpStreamDescriptor const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, streamDescriptor );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
std::strong_ordering operator<=>( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) return cmp;
if ( auto cmp = memcmp( &streamDescriptor, &rhs.streamDescriptor, sizeof( GgpStreamDescriptor ) ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
return std::strong_ordering::equivalent;
}
#endif
bool operator==( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( memcmp( &streamDescriptor, &rhs.streamDescriptor, sizeof( GgpStreamDescriptor ) ) == 0 );
}
bool operator!=( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eStreamDescriptorSurfaceCreateInfoGGP;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags = {};
GgpStreamDescriptor streamDescriptor = {};
};
template <>
struct CppType<StructureType, StructureType::eStreamDescriptorSurfaceCreateInfoGGP>
{
using Type = StreamDescriptorSurfaceCreateInfoGGP;
};
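// Usage sketch (illustrative): creating a presentation surface from a Google Games Platform
// stream descriptor; `instance` and `streamDescriptor` are hypothetical, previously obtained
// objects.
//
//   VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP createInfo{ {}, streamDescriptor };
//   VULKAN_HPP_NAMESPACE::SurfaceKHR surface = instance.createStreamDescriptorSurfaceGGP( createInfo );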
#endif /*VK_USE_PLATFORM_GGP*/
// wrapper struct for struct VkSubmitInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSubmitInfo.html
struct SubmitInfo
{
using NativeType = VkSubmitInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubmitInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SubmitInfo(uint32_t waitSemaphoreCount_ = {}, const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ = {}, const VULKAN_HPP_NAMESPACE::PipelineStageFlags * pWaitDstStageMask_ = {}, uint32_t commandBufferCount_ = {}, const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers_ = {}, uint32_t signalSemaphoreCount_ = {}, const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, waitSemaphoreCount{ waitSemaphoreCount_ }, pWaitSemaphores{ pWaitSemaphores_ }, pWaitDstStageMask{ pWaitDstStageMask_ }, commandBufferCount{ commandBufferCount_ }, pCommandBuffers{ pCommandBuffers_ }, signalSemaphoreCount{ signalSemaphoreCount_ }, pSignalSemaphores{ pSignalSemaphores_ }
{}
VULKAN_HPP_CONSTEXPR SubmitInfo( SubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SubmitInfo( VkSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: SubmitInfo( *reinterpret_cast<SubmitInfo const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
SubmitInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & waitSemaphores_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineStageFlags> const & waitDstStageMask_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & signalSemaphores_ = {}, const void * pNext_ = nullptr )
: pNext( pNext_ ), waitSemaphoreCount( static_cast<uint32_t>( waitSemaphores_.size() ) ), pWaitSemaphores( waitSemaphores_.data() ), pWaitDstStageMask( waitDstStageMask_.data() ), commandBufferCount( static_cast<uint32_t>( commandBuffers_.size() ) ), pCommandBuffers( commandBuffers_.data() ), signalSemaphoreCount( static_cast<uint32_t>( signalSemaphores_.size() ) ), pSignalSemaphores( signalSemaphores_.data() )
{
#ifdef VULKAN_HPP_NO_EXCEPTIONS
VULKAN_HPP_ASSERT( waitSemaphores_.size() == waitDstStageMask_.size() );
#else
if ( waitSemaphores_.size() != waitDstStageMask_.size() )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::SubmitInfo::SubmitInfo: waitSemaphores_.size() != waitDstStageMask_.size()" );
}
#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
SubmitInfo & operator=( SubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SubmitInfo & operator=( VkSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubmitInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
{
waitSemaphoreCount = waitSemaphoreCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT
{
pWaitSemaphores = pWaitSemaphores_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
SubmitInfo & setWaitSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & waitSemaphores_ ) VULKAN_HPP_NOEXCEPT
{
waitSemaphoreCount = static_cast<uint32_t>( waitSemaphores_.size() );
pWaitSemaphores = waitSemaphores_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setPWaitDstStageMask( const VULKAN_HPP_NAMESPACE::PipelineStageFlags * pWaitDstStageMask_ ) VULKAN_HPP_NOEXCEPT
{
pWaitDstStageMask = pWaitDstStageMask_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
SubmitInfo & setWaitDstStageMask( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineStageFlags> const & waitDstStageMask_ ) VULKAN_HPP_NOEXCEPT
{
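// Note: in the C API the wait-stage array is sized by waitSemaphoreCount, so this setter
// derives the semaphore count from waitDstStageMask_.size() as well.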
waitSemaphoreCount = static_cast<uint32_t>( waitDstStageMask_.size() );
pWaitDstStageMask = waitDstStageMask_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT
{
commandBufferCount = commandBufferCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setPCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers_ ) VULKAN_HPP_NOEXCEPT
{
pCommandBuffers = pCommandBuffers_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
SubmitInfo & setCommandBuffers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers_ ) VULKAN_HPP_NOEXCEPT
{
commandBufferCount = static_cast<uint32_t>( commandBuffers_.size() );
pCommandBuffers = commandBuffers_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
{
signalSemaphoreCount = signalSemaphoreCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setPSignalSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores_ ) VULKAN_HPP_NOEXCEPT
{
pSignalSemaphores = pSignalSemaphores_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
SubmitInfo & setSignalSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & signalSemaphores_ ) VULKAN_HPP_NOEXCEPT
{
signalSemaphoreCount = static_cast<uint32_t>( signalSemaphores_.size() );
pSignalSemaphores = signalSemaphores_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkSubmitInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSubmitInfo*>( this );
}
operator VkSubmitInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSubmitInfo*>( this );
}
operator VkSubmitInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSubmitInfo*>( this );
}
operator VkSubmitInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSubmitInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Semaphore * const &, const VULKAN_HPP_NAMESPACE::PipelineStageFlags * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::CommandBuffer * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Semaphore * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, waitSemaphoreCount, pWaitSemaphores, pWaitDstStageMask, commandBufferCount, pCommandBuffers, signalSemaphoreCount, pSignalSemaphores );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SubmitInfo const & ) const = default;
#else
bool operator==( SubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( waitSemaphoreCount == rhs.waitSemaphoreCount )
&& ( pWaitSemaphores == rhs.pWaitSemaphores )
&& ( pWaitDstStageMask == rhs.pWaitDstStageMask )
&& ( commandBufferCount == rhs.commandBufferCount )
&& ( pCommandBuffers == rhs.pCommandBuffers )
&& ( signalSemaphoreCount == rhs.signalSemaphoreCount )
&& ( pSignalSemaphores == rhs.pSignalSemaphores );
#endif
}
bool operator!=( SubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubmitInfo;
const void * pNext = {};
uint32_t waitSemaphoreCount = {};
const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores = {};
const VULKAN_HPP_NAMESPACE::PipelineStageFlags * pWaitDstStageMask = {};
uint32_t commandBufferCount = {};
const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers = {};
uint32_t signalSemaphoreCount = {};
const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores = {};
};
template <>
struct CppType<StructureType, StructureType::eSubmitInfo>
{
using Type = SubmitInfo;
};
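// Usage sketch (illustrative): the enhanced-mode constructor above derives each count field from
// the corresponding ArrayProxy and checks that the wait semaphores and wait stage masks match in
// length; `waitSemaphore`, `commandBuffer`, `signalSemaphore`, `queue`, and `fence` are
// hypothetical handles.
//
//   VULKAN_HPP_NAMESPACE::PipelineStageFlags waitStage =
//     VULKAN_HPP_NAMESPACE::PipelineStageFlagBits::eColorAttachmentOutput;
//   VULKAN_HPP_NAMESPACE::SubmitInfo submitInfo( waitSemaphore, waitStage, commandBuffer, signalSemaphore );
//   queue.submit( submitInfo, fence );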
// wrapper struct for struct VkSubmitInfo2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSubmitInfo2.html
struct SubmitInfo2
{
using NativeType = VkSubmitInfo2;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubmitInfo2;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SubmitInfo2(VULKAN_HPP_NAMESPACE::SubmitFlags flags_ = {}, uint32_t waitSemaphoreInfoCount_ = {}, const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pWaitSemaphoreInfos_ = {}, uint32_t commandBufferInfoCount_ = {}, const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo * pCommandBufferInfos_ = {}, uint32_t signalSemaphoreInfoCount_ = {}, const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pSignalSemaphoreInfos_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, waitSemaphoreInfoCount{ waitSemaphoreInfoCount_ }, pWaitSemaphoreInfos{ pWaitSemaphoreInfos_ }, commandBufferInfoCount{ commandBufferInfoCount_ }, pCommandBufferInfos{ pCommandBufferInfos_ }, signalSemaphoreInfoCount{ signalSemaphoreInfoCount_ }, pSignalSemaphoreInfos{ pSignalSemaphoreInfos_ }
{}
VULKAN_HPP_CONSTEXPR SubmitInfo2( SubmitInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SubmitInfo2( VkSubmitInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
: SubmitInfo2( *reinterpret_cast<SubmitInfo2 const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
SubmitInfo2( VULKAN_HPP_NAMESPACE::SubmitFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo> const & waitSemaphoreInfos_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo> const & commandBufferInfos_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo> const & signalSemaphoreInfos_ = {}, const void * pNext_ = nullptr )
: pNext( pNext_ ), flags( flags_ ), waitSemaphoreInfoCount( static_cast<uint32_t>( waitSemaphoreInfos_.size() ) ), pWaitSemaphoreInfos( waitSemaphoreInfos_.data() ), commandBufferInfoCount( static_cast<uint32_t>( commandBufferInfos_.size() ) ), pCommandBufferInfos( commandBufferInfos_.data() ), signalSemaphoreInfoCount( static_cast<uint32_t>( signalSemaphoreInfos_.size() ) ), pSignalSemaphoreInfos( signalSemaphoreInfos_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
SubmitInfo2 & operator=( SubmitInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SubmitInfo2 & operator=( VkSubmitInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubmitInfo2 const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setFlags( VULKAN_HPP_NAMESPACE::SubmitFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setWaitSemaphoreInfoCount( uint32_t waitSemaphoreInfoCount_ ) VULKAN_HPP_NOEXCEPT
{
waitSemaphoreInfoCount = waitSemaphoreInfoCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setPWaitSemaphoreInfos( const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pWaitSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT
{
pWaitSemaphoreInfos = pWaitSemaphoreInfos_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
SubmitInfo2 & setWaitSemaphoreInfos( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo> const & waitSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT
{
waitSemaphoreInfoCount = static_cast<uint32_t>( waitSemaphoreInfos_.size() );
pWaitSemaphoreInfos = waitSemaphoreInfos_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setCommandBufferInfoCount( uint32_t commandBufferInfoCount_ ) VULKAN_HPP_NOEXCEPT
{
commandBufferInfoCount = commandBufferInfoCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setPCommandBufferInfos( const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo * pCommandBufferInfos_ ) VULKAN_HPP_NOEXCEPT
{
pCommandBufferInfos = pCommandBufferInfos_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
SubmitInfo2 & setCommandBufferInfos( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo> const & commandBufferInfos_ ) VULKAN_HPP_NOEXCEPT
{
commandBufferInfoCount = static_cast<uint32_t>( commandBufferInfos_.size() );
pCommandBufferInfos = commandBufferInfos_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setSignalSemaphoreInfoCount( uint32_t signalSemaphoreInfoCount_ ) VULKAN_HPP_NOEXCEPT
{
signalSemaphoreInfoCount = signalSemaphoreInfoCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setPSignalSemaphoreInfos( const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pSignalSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT
{
pSignalSemaphoreInfos = pSignalSemaphoreInfos_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
SubmitInfo2 & setSignalSemaphoreInfos( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo> const & signalSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT
{
signalSemaphoreInfoCount = static_cast<uint32_t>( signalSemaphoreInfos_.size() );
pSignalSemaphoreInfos = signalSemaphoreInfos_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkSubmitInfo2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSubmitInfo2*>( this );
}
operator VkSubmitInfo2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSubmitInfo2*>( this );
}
operator VkSubmitInfo2 const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSubmitInfo2*>( this );
}
operator VkSubmitInfo2 *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSubmitInfo2*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SubmitFlags const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, waitSemaphoreInfoCount, pWaitSemaphoreInfos, commandBufferInfoCount, pCommandBufferInfos, signalSemaphoreInfoCount, pSignalSemaphoreInfos );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SubmitInfo2 const & ) const = default;
#else
bool operator==( SubmitInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( waitSemaphoreInfoCount == rhs.waitSemaphoreInfoCount )
&& ( pWaitSemaphoreInfos == rhs.pWaitSemaphoreInfos )
&& ( commandBufferInfoCount == rhs.commandBufferInfoCount )
&& ( pCommandBufferInfos == rhs.pCommandBufferInfos )
&& ( signalSemaphoreInfoCount == rhs.signalSemaphoreInfoCount )
&& ( pSignalSemaphoreInfos == rhs.pSignalSemaphoreInfos );
#endif
}
bool operator!=( SubmitInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubmitInfo2;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::SubmitFlags flags = {};
uint32_t waitSemaphoreInfoCount = {};
const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pWaitSemaphoreInfos = {};
uint32_t commandBufferInfoCount = {};
const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo * pCommandBufferInfos = {};
uint32_t signalSemaphoreInfoCount = {};
const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pSignalSemaphoreInfos = {};
};
template <>
struct CppType<StructureType, StructureType::eSubmitInfo2>
{
using Type = SubmitInfo2;
};
using SubmitInfo2KHR = SubmitInfo2;
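// Usage sketch (illustrative): the synchronization2 submit path with chained setters; `waitInfo`
// and `signalInfo` are hypothetical SemaphoreSubmitInfo objects, `cmdInfo` a hypothetical
// CommandBufferSubmitInfo, and `queue` / `fence` hypothetical handles. Note that the ArrayProxy
// setters only store pointers, so the info objects must outlive the submission call.
//
//   VULKAN_HPP_NAMESPACE::SubmitInfo2 submitInfo2 = VULKAN_HPP_NAMESPACE::SubmitInfo2{}
//     .setWaitSemaphoreInfos( waitInfo )
//     .setCommandBufferInfos( cmdInfo )
//     .setSignalSemaphoreInfos( signalInfo );
//   queue.submit2( submitInfo2, fence );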
// wrapper struct for struct VkSubpassBeginInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSubpassBeginInfo.html
struct SubpassBeginInfo
{
using NativeType = VkSubpassBeginInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassBeginInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SubpassBeginInfo(VULKAN_HPP_NAMESPACE::SubpassContents contents_ = VULKAN_HPP_NAMESPACE::SubpassContents::eInline, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, contents{ contents_ }
{}
VULKAN_HPP_CONSTEXPR SubpassBeginInfo( SubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SubpassBeginInfo( VkSubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: SubpassBeginInfo( *reinterpret_cast<SubpassBeginInfo const *>( &rhs ) )
{}
SubpassBeginInfo & operator=( SubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SubpassBeginInfo & operator=( VkSubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassBeginInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 SubpassBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SubpassBeginInfo & setContents( VULKAN_HPP_NAMESPACE::SubpassContents contents_ ) VULKAN_HPP_NOEXCEPT
{
contents = contents_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkSubpassBeginInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSubpassBeginInfo*>( this );
}
operator VkSubpassBeginInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSubpassBeginInfo*>( this );
}
operator VkSubpassBeginInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSubpassBeginInfo*>( this );
}
operator VkSubpassBeginInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSubpassBeginInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SubpassContents const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, contents );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SubpassBeginInfo const & ) const = default;
#else
bool operator==( SubpassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( contents == rhs.contents );
#endif
}
bool operator!=( SubpassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassBeginInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::SubpassContents contents = VULKAN_HPP_NAMESPACE::SubpassContents::eInline;
};
template <>
struct CppType<StructureType, StructureType::eSubpassBeginInfo>
{
using Type = SubpassBeginInfo;
};
using SubpassBeginInfoKHR = SubpassBeginInfo;
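// Usage sketch (illustrative): selecting inline subpass contents when beginning a render pass
// with the *2 entry points; `commandBuffer` and `renderPassBeginInfo` are hypothetical.
//
//   VULKAN_HPP_NAMESPACE::SubpassBeginInfo subpassBeginInfo{ VULKAN_HPP_NAMESPACE::SubpassContents::eInline };
//   commandBuffer.beginRenderPass2( renderPassBeginInfo, subpassBeginInfo );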
// wrapper struct for struct VkSubpassDescriptionDepthStencilResolve, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSubpassDescriptionDepthStencilResolve.html
struct SubpassDescriptionDepthStencilResolve
{
using NativeType = VkSubpassDescriptionDepthStencilResolve;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassDescriptionDepthStencilResolve;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SubpassDescriptionDepthStencilResolve(VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone, VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone, const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilResolveAttachment_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, depthResolveMode{ depthResolveMode_ }, stencilResolveMode{ stencilResolveMode_ }, pDepthStencilResolveAttachment{ pDepthStencilResolveAttachment_ }
{}
VULKAN_HPP_CONSTEXPR SubpassDescriptionDepthStencilResolve( SubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SubpassDescriptionDepthStencilResolve( VkSubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT
: SubpassDescriptionDepthStencilResolve( *reinterpret_cast<SubpassDescriptionDepthStencilResolve const *>( &rhs ) )
{}
SubpassDescriptionDepthStencilResolve & operator=( SubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SubpassDescriptionDepthStencilResolve & operator=( VkSubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolve const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 SubpassDescriptionDepthStencilResolve & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SubpassDescriptionDepthStencilResolve & setDepthResolveMode( VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode_ ) VULKAN_HPP_NOEXCEPT
{
depthResolveMode = depthResolveMode_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SubpassDescriptionDepthStencilResolve & setStencilResolveMode( VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode_ ) VULKAN_HPP_NOEXCEPT
{
stencilResolveMode = stencilResolveMode_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SubpassDescriptionDepthStencilResolve & setPDepthStencilResolveAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilResolveAttachment_ ) VULKAN_HPP_NOEXCEPT
{
pDepthStencilResolveAttachment = pDepthStencilResolveAttachment_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkSubpassDescriptionDepthStencilResolve const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSubpassDescriptionDepthStencilResolve*>( this );
}
operator VkSubpassDescriptionDepthStencilResolve &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSubpassDescriptionDepthStencilResolve*>( this );
}
operator VkSubpassDescriptionDepthStencilResolve const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSubpassDescriptionDepthStencilResolve*>( this );
}
operator VkSubpassDescriptionDepthStencilResolve *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSubpassDescriptionDepthStencilResolve*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ResolveModeFlagBits const &, VULKAN_HPP_NAMESPACE::ResolveModeFlagBits const &, const VULKAN_HPP_NAMESPACE::AttachmentReference2 * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, depthResolveMode, stencilResolveMode, pDepthStencilResolveAttachment );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SubpassDescriptionDepthStencilResolve const & ) const = default;
#else
bool operator==( SubpassDescriptionDepthStencilResolve const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( depthResolveMode == rhs.depthResolveMode )
&& ( stencilResolveMode == rhs.stencilResolveMode )
&& ( pDepthStencilResolveAttachment == rhs.pDepthStencilResolveAttachment );
#endif
}
bool operator!=( SubpassDescriptionDepthStencilResolve const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDescriptionDepthStencilResolve;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone;
VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone;
const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilResolveAttachment = {};
};
template <>
struct CppType<StructureType, StructureType::eSubpassDescriptionDepthStencilResolve>
{
using Type = SubpassDescriptionDepthStencilResolve;
};
using SubpassDescriptionDepthStencilResolveKHR = SubpassDescriptionDepthStencilResolve;
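// Usage sketch (illustrative): enabling depth/stencil resolve on a subpass by chaining this
// struct into a SubpassDescription2; `resolveAttachmentRef` (an AttachmentReference2) and
// `subpassDescription` are hypothetical.
//
//   VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolve dsResolve{
//     VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eSampleZero,
//     VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eSampleZero,
//     &resolveAttachmentRef };
//   subpassDescription.pNext = &dsResolve;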
// wrapper struct for struct VkSubpassEndInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSubpassEndInfo.html
struct SubpassEndInfo
{
using NativeType = VkSubpassEndInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassEndInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SubpassEndInfo(const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }
{}
VULKAN_HPP_CONSTEXPR SubpassEndInfo( SubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SubpassEndInfo( VkSubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: SubpassEndInfo( *reinterpret_cast<SubpassEndInfo const *>( &rhs ) )
{}
SubpassEndInfo & operator=( SubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SubpassEndInfo & operator=( VkSubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassEndInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 SubpassEndInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkSubpassEndInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSubpassEndInfo*>( this );
}
operator VkSubpassEndInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSubpassEndInfo*>( this );
}
operator VkSubpassEndInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSubpassEndInfo*>( this );
}
operator VkSubpassEndInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSubpassEndInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SubpassEndInfo const & ) const = default;
#else
bool operator==( SubpassEndInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext );
#endif
}
bool operator!=( SubpassEndInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassEndInfo;
const void * pNext = {};
};
template <>
struct CppType<StructureType, StructureType::eSubpassEndInfo>
{
using Type = SubpassEndInfo;
};
using SubpassEndInfoKHR = SubpassEndInfo;
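// Usage sketch (illustrative): SubpassEndInfo carries no parameters beyond an extensible pNext,
// so a default-constructed instance is the common case; `commandBuffer` and `subpassBeginInfo`
// are hypothetical.
//
//   VULKAN_HPP_NAMESPACE::SubpassEndInfo subpassEndInfo{};
//   commandBuffer.nextSubpass2( subpassBeginInfo, subpassEndInfo );
//   commandBuffer.endRenderPass2( subpassEndInfo );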
// wrapper struct for struct VkSubpassResolvePerformanceQueryEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSubpassResolvePerformanceQueryEXT.html
struct SubpassResolvePerformanceQueryEXT
{
using NativeType = VkSubpassResolvePerformanceQueryEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassResolvePerformanceQueryEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SubpassResolvePerformanceQueryEXT(VULKAN_HPP_NAMESPACE::Bool32 optimal_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, optimal{ optimal_ }
{}
VULKAN_HPP_CONSTEXPR SubpassResolvePerformanceQueryEXT( SubpassResolvePerformanceQueryEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SubpassResolvePerformanceQueryEXT( VkSubpassResolvePerformanceQueryEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: SubpassResolvePerformanceQueryEXT( *reinterpret_cast<SubpassResolvePerformanceQueryEXT const *>( &rhs ) )
{}
SubpassResolvePerformanceQueryEXT & operator=( SubpassResolvePerformanceQueryEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SubpassResolvePerformanceQueryEXT & operator=( VkSubpassResolvePerformanceQueryEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassResolvePerformanceQueryEXT const *>( &rhs );
return *this;
}
operator VkSubpassResolvePerformanceQueryEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSubpassResolvePerformanceQueryEXT*>( this );
}
operator VkSubpassResolvePerformanceQueryEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSubpassResolvePerformanceQueryEXT*>( this );
}
operator VkSubpassResolvePerformanceQueryEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSubpassResolvePerformanceQueryEXT*>( this );
}
operator VkSubpassResolvePerformanceQueryEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSubpassResolvePerformanceQueryEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, optimal );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SubpassResolvePerformanceQueryEXT const & ) const = default;
#else
bool operator==( SubpassResolvePerformanceQueryEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( optimal == rhs.optimal );
#endif
}
bool operator!=( SubpassResolvePerformanceQueryEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassResolvePerformanceQueryEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 optimal = {};
};
template <>
struct CppType<StructureType, StructureType::eSubpassResolvePerformanceQueryEXT>
{
using Type = SubpassResolvePerformanceQueryEXT;
};
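// Usage sketch (illustrative): per VK_EXT_multisampled_render_to_single_sampled, this read-only
// struct is assumed to chain into a FormatProperties2 query, reporting whether subpass resolve
// performs optimally for a format; `physicalDevice` and `format` are hypothetical.
//
//   auto chain = physicalDevice.getFormatProperties2<
//     VULKAN_HPP_NAMESPACE::FormatProperties2,
//     VULKAN_HPP_NAMESPACE::SubpassResolvePerformanceQueryEXT>( format );
//   VULKAN_HPP_NAMESPACE::Bool32 optimal =
//     chain.get<VULKAN_HPP_NAMESPACE::SubpassResolvePerformanceQueryEXT>().optimal;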
// wrapper struct for struct VkSubpassShadingPipelineCreateInfoHUAWEI, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSubpassShadingPipelineCreateInfoHUAWEI.html
struct SubpassShadingPipelineCreateInfoHUAWEI
{
using NativeType = VkSubpassShadingPipelineCreateInfoHUAWEI;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassShadingPipelineCreateInfoHUAWEI;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SubpassShadingPipelineCreateInfoHUAWEI(VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, uint32_t subpass_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, renderPass{ renderPass_ }, subpass{ subpass_ }
{}
VULKAN_HPP_CONSTEXPR SubpassShadingPipelineCreateInfoHUAWEI( SubpassShadingPipelineCreateInfoHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SubpassShadingPipelineCreateInfoHUAWEI( VkSubpassShadingPipelineCreateInfoHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
: SubpassShadingPipelineCreateInfoHUAWEI( *reinterpret_cast<SubpassShadingPipelineCreateInfoHUAWEI const *>( &rhs ) )
{}
SubpassShadingPipelineCreateInfoHUAWEI & operator=( SubpassShadingPipelineCreateInfoHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SubpassShadingPipelineCreateInfoHUAWEI & operator=( VkSubpassShadingPipelineCreateInfoHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassShadingPipelineCreateInfoHUAWEI const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 SubpassShadingPipelineCreateInfoHUAWEI & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SubpassShadingPipelineCreateInfoHUAWEI & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
{
renderPass = renderPass_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SubpassShadingPipelineCreateInfoHUAWEI & setSubpass( uint32_t subpass_ ) VULKAN_HPP_NOEXCEPT
{
subpass = subpass_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkSubpassShadingPipelineCreateInfoHUAWEI const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSubpassShadingPipelineCreateInfoHUAWEI*>( this );
}
operator VkSubpassShadingPipelineCreateInfoHUAWEI &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSubpassShadingPipelineCreateInfoHUAWEI*>( this );
}
operator VkSubpassShadingPipelineCreateInfoHUAWEI const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSubpassShadingPipelineCreateInfoHUAWEI*>( this );
}
operator VkSubpassShadingPipelineCreateInfoHUAWEI *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSubpassShadingPipelineCreateInfoHUAWEI*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::RenderPass const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, renderPass, subpass );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SubpassShadingPipelineCreateInfoHUAWEI const & ) const = default;
#else
bool operator==( SubpassShadingPipelineCreateInfoHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( renderPass == rhs.renderPass )
&& ( subpass == rhs.subpass );
#endif
}
bool operator!=( SubpassShadingPipelineCreateInfoHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassShadingPipelineCreateInfoHUAWEI;
void * pNext = {};
VULKAN_HPP_NAMESPACE::RenderPass renderPass = {};
uint32_t subpass = {};
};
template <>
struct CppType<StructureType, StructureType::eSubpassShadingPipelineCreateInfoHUAWEI>
{
using Type = SubpassShadingPipelineCreateInfoHUAWEI;
};
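// usage sketch (illustrative, not part of the generated header): under VK_HUAWEI_subpass_shading this struct is chained
// into a compute pipeline's create info to tie the pipeline to a render pass subpass; `device`, `stageInfo`, `layout`
// and `renderPass` are assumed to exist:
//   vk::StructureChain<vk::ComputePipelineCreateInfo, vk::SubpassShadingPipelineCreateInfoHUAWEI> chain{
//     vk::ComputePipelineCreateInfo{}.setStage( stageInfo ).setLayout( layout ),
//     vk::SubpassShadingPipelineCreateInfoHUAWEI{}.setRenderPass( renderPass ).setSubpass( 0 ) };
//   vk::Pipeline pipeline = device.createComputePipeline( nullptr, chain.get<vk::ComputePipelineCreateInfo>() ).value;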
// wrapper struct for struct VkSubresourceHostMemcpySize, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSubresourceHostMemcpySize.html
struct SubresourceHostMemcpySize
{
using NativeType = VkSubresourceHostMemcpySize;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubresourceHostMemcpySize;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SubresourceHostMemcpySize(VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, size{ size_ }
{}
VULKAN_HPP_CONSTEXPR SubresourceHostMemcpySize( SubresourceHostMemcpySize const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SubresourceHostMemcpySize( VkSubresourceHostMemcpySize const & rhs ) VULKAN_HPP_NOEXCEPT
: SubresourceHostMemcpySize( *reinterpret_cast<SubresourceHostMemcpySize const *>( &rhs ) )
{}
SubresourceHostMemcpySize & operator=( SubresourceHostMemcpySize const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SubresourceHostMemcpySize & operator=( VkSubresourceHostMemcpySize const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubresourceHostMemcpySize const *>( &rhs );
return *this;
}
operator VkSubresourceHostMemcpySize const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSubresourceHostMemcpySize*>( this );
}
operator VkSubresourceHostMemcpySize &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSubresourceHostMemcpySize*>( this );
}
operator VkSubresourceHostMemcpySize const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSubresourceHostMemcpySize*>( this );
}
operator VkSubresourceHostMemcpySize *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSubresourceHostMemcpySize*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, size );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SubresourceHostMemcpySize const & ) const = default;
#else
bool operator==( SubresourceHostMemcpySize const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( size == rhs.size );
#endif
}
bool operator!=( SubresourceHostMemcpySize const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubresourceHostMemcpySize;
void * pNext = {};
VULKAN_HPP_NAMESPACE::DeviceSize size = {};
};
template <>
struct CppType<StructureType, StructureType::eSubresourceHostMemcpySize>
{
using Type = SubresourceHostMemcpySize;
};
using SubresourceHostMemcpySizeEXT = SubresourceHostMemcpySize;
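// usage sketch (illustrative, not part of the generated header; assumes the chained getImageSubresourceLayout2 overload
// and host-image-copy support): chained to vk::SubresourceLayout2, this reports the buffer size needed to copy one
// subresource of `image` to host memory; `device` and `image` are assumed:
//   vk::ImageSubresource2 subresource{ vk::ImageSubresource{ vk::ImageAspectFlagBits::eColor, 0, 0 } };
//   auto chain = device.getImageSubresourceLayout2<vk::SubresourceLayout2,
//                                                  vk::SubresourceHostMemcpySize>( image, subresource );
//   vk::DeviceSize bytes = chain.get<vk::SubresourceHostMemcpySize>().size;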
// wrapper struct for struct VkSubresourceLayout2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSubresourceLayout2.html
struct SubresourceLayout2
{
using NativeType = VkSubresourceLayout2;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubresourceLayout2;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SubresourceLayout2(VULKAN_HPP_NAMESPACE::SubresourceLayout subresourceLayout_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, subresourceLayout{ subresourceLayout_ }
{}
VULKAN_HPP_CONSTEXPR SubresourceLayout2( SubresourceLayout2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SubresourceLayout2( VkSubresourceLayout2 const & rhs ) VULKAN_HPP_NOEXCEPT
: SubresourceLayout2( *reinterpret_cast<SubresourceLayout2 const *>( &rhs ) )
{}
SubresourceLayout2 & operator=( SubresourceLayout2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SubresourceLayout2 & operator=( VkSubresourceLayout2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubresourceLayout2 const *>( &rhs );
return *this;
}
operator VkSubresourceLayout2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSubresourceLayout2*>( this );
}
operator VkSubresourceLayout2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSubresourceLayout2*>( this );
}
operator VkSubresourceLayout2 const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSubresourceLayout2*>( this );
}
operator VkSubresourceLayout2 *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSubresourceLayout2*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::SubresourceLayout const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, subresourceLayout );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SubresourceLayout2 const & ) const = default;
#else
bool operator==( SubresourceLayout2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( subresourceLayout == rhs.subresourceLayout );
#endif
}
bool operator!=( SubresourceLayout2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubresourceLayout2;
void * pNext = {};
VULKAN_HPP_NAMESPACE::SubresourceLayout subresourceLayout = {};
};
template <>
struct CppType<StructureType, StructureType::eSubresourceLayout2>
{
using Type = SubresourceLayout2;
};
using SubresourceLayout2EXT = SubresourceLayout2;
using SubresourceLayout2KHR = SubresourceLayout2;
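// usage sketch (illustrative, not part of the generated header): the extensible subresource-layout query (core in
// Vulkan 1.4); `device` and `image` are assumed:
//   vk::ImageSubresource2 subresource{ vk::ImageSubresource{ vk::ImageAspectFlagBits::eColor, 0, 0 } };
//   vk::SubresourceLayout2 layout2 = device.getImageSubresourceLayout2( image, subresource );
//   vk::DeviceSize rowPitch = layout2.subresourceLayout.rowPitch;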
// wrapper struct for struct VkSurfaceCapabilities2EXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSurfaceCapabilities2EXT.html
struct SurfaceCapabilities2EXT
{
using NativeType = VkSurfaceCapabilities2EXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceCapabilities2EXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SurfaceCapabilities2EXT(uint32_t minImageCount_ = {}, uint32_t maxImageCount_ = {}, VULKAN_HPP_NAMESPACE::Extent2D currentExtent_ = {}, VULKAN_HPP_NAMESPACE::Extent2D minImageExtent_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent_ = {}, uint32_t maxImageArrayLayers_ = {}, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {}, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha_ = {}, VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags_ = {}, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT supportedSurfaceCounters_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, minImageCount{ minImageCount_ }, maxImageCount{ maxImageCount_ }, currentExtent{ currentExtent_ }, minImageExtent{ minImageExtent_ }, maxImageExtent{ maxImageExtent_ }, maxImageArrayLayers{ maxImageArrayLayers_ }, supportedTransforms{ supportedTransforms_ }, currentTransform{ currentTransform_ }, supportedCompositeAlpha{ supportedCompositeAlpha_ }, supportedUsageFlags{ supportedUsageFlags_ }, supportedSurfaceCounters{ supportedSurfaceCounters_ }
{}
VULKAN_HPP_CONSTEXPR SurfaceCapabilities2EXT( SurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SurfaceCapabilities2EXT( VkSurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
: SurfaceCapabilities2EXT( *reinterpret_cast<SurfaceCapabilities2EXT const *>( &rhs ) )
{}
SurfaceCapabilities2EXT & operator=( SurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SurfaceCapabilities2EXT & operator=( VkSurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT const *>( &rhs );
return *this;
}
operator VkSurfaceCapabilities2EXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSurfaceCapabilities2EXT*>( this );
}
operator VkSurfaceCapabilities2EXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSurfaceCapabilities2EXT*>( this );
}
operator VkSurfaceCapabilities2EXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSurfaceCapabilities2EXT*>( this );
}
operator VkSurfaceCapabilities2EXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSurfaceCapabilities2EXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &, uint32_t const &, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR const &, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR const &, VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR const &, VULKAN_HPP_NAMESPACE::ImageUsageFlags const &, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, minImageCount, maxImageCount, currentExtent, minImageExtent, maxImageExtent, maxImageArrayLayers, supportedTransforms, currentTransform, supportedCompositeAlpha, supportedUsageFlags, supportedSurfaceCounters );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SurfaceCapabilities2EXT const & ) const = default;
#else
bool operator==( SurfaceCapabilities2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( minImageCount == rhs.minImageCount )
&& ( maxImageCount == rhs.maxImageCount )
&& ( currentExtent == rhs.currentExtent )
&& ( minImageExtent == rhs.minImageExtent )
&& ( maxImageExtent == rhs.maxImageExtent )
&& ( maxImageArrayLayers == rhs.maxImageArrayLayers )
&& ( supportedTransforms == rhs.supportedTransforms )
&& ( currentTransform == rhs.currentTransform )
&& ( supportedCompositeAlpha == rhs.supportedCompositeAlpha )
&& ( supportedUsageFlags == rhs.supportedUsageFlags )
&& ( supportedSurfaceCounters == rhs.supportedSurfaceCounters );
#endif
}
bool operator!=( SurfaceCapabilities2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilities2EXT;
void * pNext = {};
uint32_t minImageCount = {};
uint32_t maxImageCount = {};
VULKAN_HPP_NAMESPACE::Extent2D currentExtent = {};
VULKAN_HPP_NAMESPACE::Extent2D minImageExtent = {};
VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent = {};
uint32_t maxImageArrayLayers = {};
VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {};
VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha = {};
VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags = {};
VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT supportedSurfaceCounters = {};
};
template <>
struct CppType<StructureType, StructureType::eSurfaceCapabilities2EXT>
{
using Type = SurfaceCapabilities2EXT;
};
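// usage sketch (illustrative, not part of the generated header): filled by VK_EXT_display_surface_counter;
// `physicalDevice` and `surface` are assumed:
//   vk::SurfaceCapabilities2EXT caps = physicalDevice.getSurfaceCapabilities2EXT( surface );
//   bool hasVBlankCounter = static_cast<bool>( caps.supportedSurfaceCounters & vk::SurfaceCounterFlagBitsEXT::eVblank );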
// wrapper struct for struct VkSurfaceCapabilitiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSurfaceCapabilitiesKHR.html
struct SurfaceCapabilitiesKHR
{
using NativeType = VkSurfaceCapabilitiesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesKHR(uint32_t minImageCount_ = {}, uint32_t maxImageCount_ = {}, VULKAN_HPP_NAMESPACE::Extent2D currentExtent_ = {}, VULKAN_HPP_NAMESPACE::Extent2D minImageExtent_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent_ = {}, uint32_t maxImageArrayLayers_ = {}, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {}, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha_ = {}, VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags_ = {}) VULKAN_HPP_NOEXCEPT
: minImageCount{ minImageCount_ }, maxImageCount{ maxImageCount_ }, currentExtent{ currentExtent_ }, minImageExtent{ minImageExtent_ }, maxImageExtent{ maxImageExtent_ }, maxImageArrayLayers{ maxImageArrayLayers_ }, supportedTransforms{ supportedTransforms_ }, currentTransform{ currentTransform_ }, supportedCompositeAlpha{ supportedCompositeAlpha_ }, supportedUsageFlags{ supportedUsageFlags_ }
{}
VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesKHR( SurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SurfaceCapabilitiesKHR( VkSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: SurfaceCapabilitiesKHR( *reinterpret_cast<SurfaceCapabilitiesKHR const *>( &rhs ) )
{}
SurfaceCapabilitiesKHR & operator=( SurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SurfaceCapabilitiesKHR & operator=( VkSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR const *>( &rhs );
return *this;
}
operator VkSurfaceCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSurfaceCapabilitiesKHR*>( this );
}
operator VkSurfaceCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSurfaceCapabilitiesKHR*>( this );
}
operator VkSurfaceCapabilitiesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSurfaceCapabilitiesKHR*>( this );
}
operator VkSurfaceCapabilitiesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSurfaceCapabilitiesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &, uint32_t const &, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR const &, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR const &, VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR const &, VULKAN_HPP_NAMESPACE::ImageUsageFlags const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( minImageCount, maxImageCount, currentExtent, minImageExtent, maxImageExtent, maxImageArrayLayers, supportedTransforms, currentTransform, supportedCompositeAlpha, supportedUsageFlags );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SurfaceCapabilitiesKHR const & ) const = default;
#else
bool operator==( SurfaceCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( minImageCount == rhs.minImageCount )
&& ( maxImageCount == rhs.maxImageCount )
&& ( currentExtent == rhs.currentExtent )
&& ( minImageExtent == rhs.minImageExtent )
&& ( maxImageExtent == rhs.maxImageExtent )
&& ( maxImageArrayLayers == rhs.maxImageArrayLayers )
&& ( supportedTransforms == rhs.supportedTransforms )
&& ( currentTransform == rhs.currentTransform )
&& ( supportedCompositeAlpha == rhs.supportedCompositeAlpha )
&& ( supportedUsageFlags == rhs.supportedUsageFlags );
#endif
}
bool operator!=( SurfaceCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
uint32_t minImageCount = {};
uint32_t maxImageCount = {};
VULKAN_HPP_NAMESPACE::Extent2D currentExtent = {};
VULKAN_HPP_NAMESPACE::Extent2D minImageExtent = {};
VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent = {};
uint32_t maxImageArrayLayers = {};
VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {};
VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha = {};
VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags = {};
};
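// usage sketch (illustrative, not part of the generated header): typical swapchain image-count selection;
// `physicalDevice` and `surface` are assumed:
//   vk::SurfaceCapabilitiesKHR caps = physicalDevice.getSurfaceCapabilitiesKHR( surface );
//   uint32_t imageCount = caps.minImageCount + 1;                             // one spare image for buffering
//   if ( ( caps.maxImageCount > 0 ) && ( imageCount > caps.maxImageCount ) )  // maxImageCount == 0 means "no upper limit"
//     imageCount = caps.maxImageCount;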
// wrapper struct for struct VkSurfaceCapabilities2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSurfaceCapabilities2KHR.html
struct SurfaceCapabilities2KHR
{
using NativeType = VkSurfaceCapabilities2KHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceCapabilities2KHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SurfaceCapabilities2KHR(VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, surfaceCapabilities{ surfaceCapabilities_ }
{}
VULKAN_HPP_CONSTEXPR SurfaceCapabilities2KHR( SurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SurfaceCapabilities2KHR( VkSurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
: SurfaceCapabilities2KHR( *reinterpret_cast<SurfaceCapabilities2KHR const *>( &rhs ) )
{}
SurfaceCapabilities2KHR & operator=( SurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SurfaceCapabilities2KHR & operator=( VkSurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR const *>( &rhs );
return *this;
}
operator VkSurfaceCapabilities2KHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSurfaceCapabilities2KHR*>( this );
}
operator VkSurfaceCapabilities2KHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSurfaceCapabilities2KHR*>( this );
}
operator VkSurfaceCapabilities2KHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSurfaceCapabilities2KHR*>( this );
}
operator VkSurfaceCapabilities2KHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSurfaceCapabilities2KHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, surfaceCapabilities );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SurfaceCapabilities2KHR const & ) const = default;
#else
bool operator==( SurfaceCapabilities2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( surfaceCapabilities == rhs.surfaceCapabilities );
#endif
}
bool operator!=( SurfaceCapabilities2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilities2KHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities = {};
};
template <>
struct CppType<StructureType, StructureType::eSurfaceCapabilities2KHR>
{
using Type = SurfaceCapabilities2KHR;
};
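// usage sketch (illustrative, not part of the generated header): the extensible variant of the capabilities query;
// the extension structs below chain to it. `physicalDevice` and `surface` are assumed:
//   vk::SurfaceCapabilities2KHR caps2 =
//     physicalDevice.getSurfaceCapabilities2KHR( vk::PhysicalDeviceSurfaceInfo2KHR{ surface } );
//   uint32_t minImages = caps2.surfaceCapabilities.minImageCount;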
#if defined( VK_USE_PLATFORM_WIN32_KHR )
// wrapper struct for struct VkSurfaceCapabilitiesFullScreenExclusiveEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSurfaceCapabilitiesFullScreenExclusiveEXT.html
struct SurfaceCapabilitiesFullScreenExclusiveEXT
{
using NativeType = VkSurfaceCapabilitiesFullScreenExclusiveEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesFullScreenExclusiveEXT(VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, fullScreenExclusiveSupported{ fullScreenExclusiveSupported_ }
{}
VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesFullScreenExclusiveEXT( SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SurfaceCapabilitiesFullScreenExclusiveEXT( VkSurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: SurfaceCapabilitiesFullScreenExclusiveEXT( *reinterpret_cast<SurfaceCapabilitiesFullScreenExclusiveEXT const *>( &rhs ) )
{}
SurfaceCapabilitiesFullScreenExclusiveEXT & operator=( SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SurfaceCapabilitiesFullScreenExclusiveEXT & operator=( VkSurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesFullScreenExclusiveEXT const *>( &rhs );
return *this;
}
operator VkSurfaceCapabilitiesFullScreenExclusiveEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSurfaceCapabilitiesFullScreenExclusiveEXT*>( this );
}
operator VkSurfaceCapabilitiesFullScreenExclusiveEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSurfaceCapabilitiesFullScreenExclusiveEXT*>( this );
}
operator VkSurfaceCapabilitiesFullScreenExclusiveEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSurfaceCapabilitiesFullScreenExclusiveEXT*>( this );
}
operator VkSurfaceCapabilitiesFullScreenExclusiveEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSurfaceCapabilitiesFullScreenExclusiveEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, fullScreenExclusiveSupported );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SurfaceCapabilitiesFullScreenExclusiveEXT const & ) const = default;
#else
bool operator==( SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( fullScreenExclusiveSupported == rhs.fullScreenExclusiveSupported );
#endif
}
bool operator!=( SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported = {};
};
template <>
struct CppType<StructureType, StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT>
{
using Type = SurfaceCapabilitiesFullScreenExclusiveEXT;
};
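// usage sketch (illustrative, not part of the generated header): chained to vk::SurfaceCapabilities2KHR while the query
// itself carries a vk::SurfaceFullScreenExclusiveInfoEXT; `physicalDevice` and `surface` are assumed:
//   vk::SurfaceFullScreenExclusiveInfoEXT fullScreenInfo{ vk::FullScreenExclusiveEXT::eApplicationControlled };
//   vk::PhysicalDeviceSurfaceInfo2KHR surfaceInfo{ surface, &fullScreenInfo };
//   auto chain = physicalDevice.getSurfaceCapabilities2KHR<vk::SurfaceCapabilities2KHR,
//                                                          vk::SurfaceCapabilitiesFullScreenExclusiveEXT>( surfaceInfo );
//   vk::Bool32 supported = chain.get<vk::SurfaceCapabilitiesFullScreenExclusiveEXT>().fullScreenExclusiveSupported;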
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
// wrapper struct for struct VkSurfaceCapabilitiesPresentBarrierNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSurfaceCapabilitiesPresentBarrierNV.html
struct SurfaceCapabilitiesPresentBarrierNV
{
using NativeType = VkSurfaceCapabilitiesPresentBarrierNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceCapabilitiesPresentBarrierNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesPresentBarrierNV(VULKAN_HPP_NAMESPACE::Bool32 presentBarrierSupported_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, presentBarrierSupported{ presentBarrierSupported_ }
{}
VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesPresentBarrierNV( SurfaceCapabilitiesPresentBarrierNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SurfaceCapabilitiesPresentBarrierNV( VkSurfaceCapabilitiesPresentBarrierNV const & rhs ) VULKAN_HPP_NOEXCEPT
: SurfaceCapabilitiesPresentBarrierNV( *reinterpret_cast<SurfaceCapabilitiesPresentBarrierNV const *>( &rhs ) )
{}
SurfaceCapabilitiesPresentBarrierNV & operator=( SurfaceCapabilitiesPresentBarrierNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SurfaceCapabilitiesPresentBarrierNV & operator=( VkSurfaceCapabilitiesPresentBarrierNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesPresentBarrierNV const *>( &rhs );
return *this;
}
operator VkSurfaceCapabilitiesPresentBarrierNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSurfaceCapabilitiesPresentBarrierNV*>( this );
}
operator VkSurfaceCapabilitiesPresentBarrierNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSurfaceCapabilitiesPresentBarrierNV*>( this );
}
operator VkSurfaceCapabilitiesPresentBarrierNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSurfaceCapabilitiesPresentBarrierNV*>( this );
}
operator VkSurfaceCapabilitiesPresentBarrierNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSurfaceCapabilitiesPresentBarrierNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, presentBarrierSupported );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SurfaceCapabilitiesPresentBarrierNV const & ) const = default;
#else
bool operator==( SurfaceCapabilitiesPresentBarrierNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( presentBarrierSupported == rhs.presentBarrierSupported );
#endif
}
bool operator!=( SurfaceCapabilitiesPresentBarrierNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilitiesPresentBarrierNV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 presentBarrierSupported = {};
};
template <>
struct CppType<StructureType, StructureType::eSurfaceCapabilitiesPresentBarrierNV>
{
using Type = SurfaceCapabilitiesPresentBarrierNV;
};
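// usage sketch (illustrative, not part of the generated header): chained to vk::SurfaceCapabilities2KHR under
// VK_NV_present_barrier; `physicalDevice` and `surface` are assumed:
//   auto chain = physicalDevice.getSurfaceCapabilities2KHR<vk::SurfaceCapabilities2KHR,
//                                                          vk::SurfaceCapabilitiesPresentBarrierNV>(
//     vk::PhysicalDeviceSurfaceInfo2KHR{ surface } );
//   vk::Bool32 barrier = chain.get<vk::SurfaceCapabilitiesPresentBarrierNV>().presentBarrierSupported;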
// wrapper struct for struct VkSurfaceCapabilitiesPresentId2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSurfaceCapabilitiesPresentId2KHR.html
struct SurfaceCapabilitiesPresentId2KHR
{
using NativeType = VkSurfaceCapabilitiesPresentId2KHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceCapabilitiesPresentId2KHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesPresentId2KHR(VULKAN_HPP_NAMESPACE::Bool32 presentId2Supported_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, presentId2Supported{ presentId2Supported_ }
{}
VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesPresentId2KHR( SurfaceCapabilitiesPresentId2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SurfaceCapabilitiesPresentId2KHR( VkSurfaceCapabilitiesPresentId2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
: SurfaceCapabilitiesPresentId2KHR( *reinterpret_cast<SurfaceCapabilitiesPresentId2KHR const *>( &rhs ) )
{}
SurfaceCapabilitiesPresentId2KHR & operator=( SurfaceCapabilitiesPresentId2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SurfaceCapabilitiesPresentId2KHR & operator=( VkSurfaceCapabilitiesPresentId2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesPresentId2KHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 SurfaceCapabilitiesPresentId2KHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SurfaceCapabilitiesPresentId2KHR & setPresentId2Supported( VULKAN_HPP_NAMESPACE::Bool32 presentId2Supported_ ) VULKAN_HPP_NOEXCEPT
{
presentId2Supported = presentId2Supported_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkSurfaceCapabilitiesPresentId2KHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSurfaceCapabilitiesPresentId2KHR*>( this );
}
operator VkSurfaceCapabilitiesPresentId2KHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSurfaceCapabilitiesPresentId2KHR*>( this );
}
operator VkSurfaceCapabilitiesPresentId2KHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSurfaceCapabilitiesPresentId2KHR*>( this );
}
operator VkSurfaceCapabilitiesPresentId2KHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSurfaceCapabilitiesPresentId2KHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, presentId2Supported );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SurfaceCapabilitiesPresentId2KHR const & ) const = default;
#else
bool operator==( SurfaceCapabilitiesPresentId2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( presentId2Supported == rhs.presentId2Supported );
#endif
}
bool operator!=( SurfaceCapabilitiesPresentId2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilitiesPresentId2KHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 presentId2Supported = {};
};
template <>
struct CppType<StructureType, StructureType::eSurfaceCapabilitiesPresentId2KHR>
{
using Type = SurfaceCapabilitiesPresentId2KHR;
};
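// usage sketch (illustrative, not part of the generated header): chained to vk::SurfaceCapabilities2KHR to test
// per-surface VK_KHR_present_id2 support; `physicalDevice` and `surface` are assumed:
//   auto chain = physicalDevice.getSurfaceCapabilities2KHR<vk::SurfaceCapabilities2KHR,
//                                                          vk::SurfaceCapabilitiesPresentId2KHR>(
//     vk::PhysicalDeviceSurfaceInfo2KHR{ surface } );
//   vk::Bool32 presentId2 = chain.get<vk::SurfaceCapabilitiesPresentId2KHR>().presentId2Supported;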
// wrapper struct for struct VkSurfaceCapabilitiesPresentWait2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSurfaceCapabilitiesPresentWait2KHR.html
struct SurfaceCapabilitiesPresentWait2KHR
{
using NativeType = VkSurfaceCapabilitiesPresentWait2KHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceCapabilitiesPresentWait2KHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesPresentWait2KHR(VULKAN_HPP_NAMESPACE::Bool32 presentWait2Supported_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, presentWait2Supported{ presentWait2Supported_ }
{}
VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesPresentWait2KHR( SurfaceCapabilitiesPresentWait2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SurfaceCapabilitiesPresentWait2KHR( VkSurfaceCapabilitiesPresentWait2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
: SurfaceCapabilitiesPresentWait2KHR( *reinterpret_cast<SurfaceCapabilitiesPresentWait2KHR const *>( &rhs ) )
{}
SurfaceCapabilitiesPresentWait2KHR & operator=( SurfaceCapabilitiesPresentWait2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SurfaceCapabilitiesPresentWait2KHR & operator=( VkSurfaceCapabilitiesPresentWait2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesPresentWait2KHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 SurfaceCapabilitiesPresentWait2KHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SurfaceCapabilitiesPresentWait2KHR & setPresentWait2Supported( VULKAN_HPP_NAMESPACE::Bool32 presentWait2Supported_ ) VULKAN_HPP_NOEXCEPT
{
presentWait2Supported = presentWait2Supported_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkSurfaceCapabilitiesPresentWait2KHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSurfaceCapabilitiesPresentWait2KHR*>( this );
}
operator VkSurfaceCapabilitiesPresentWait2KHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSurfaceCapabilitiesPresentWait2KHR*>( this );
}
operator VkSurfaceCapabilitiesPresentWait2KHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSurfaceCapabilitiesPresentWait2KHR*>( this );
}
operator VkSurfaceCapabilitiesPresentWait2KHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSurfaceCapabilitiesPresentWait2KHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, presentWait2Supported );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SurfaceCapabilitiesPresentWait2KHR const & ) const = default;
#else
bool operator==( SurfaceCapabilitiesPresentWait2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( presentWait2Supported == rhs.presentWait2Supported );
#endif
}
bool operator!=( SurfaceCapabilitiesPresentWait2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilitiesPresentWait2KHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 presentWait2Supported = {};
};
template <>
struct CppType<StructureType, StructureType::eSurfaceCapabilitiesPresentWait2KHR>
{
using Type = SurfaceCapabilitiesPresentWait2KHR;
};
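// usage sketch (illustrative, not part of the generated header): the VK_KHR_present_wait2 analogue of the query above;
// `physicalDevice` and `surface` are assumed:
//   auto chain = physicalDevice.getSurfaceCapabilities2KHR<vk::SurfaceCapabilities2KHR,
//                                                          vk::SurfaceCapabilitiesPresentWait2KHR>(
//     vk::PhysicalDeviceSurfaceInfo2KHR{ surface } );
//   vk::Bool32 presentWait2 = chain.get<vk::SurfaceCapabilitiesPresentWait2KHR>().presentWait2Supported;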
// wrapper struct for struct VkSurfaceFormatKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSurfaceFormatKHR.html
struct SurfaceFormatKHR
{
using NativeType = VkSurfaceFormatKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SurfaceFormatKHR(VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::ColorSpaceKHR colorSpace_ = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear) VULKAN_HPP_NOEXCEPT
: format{ format_ }, colorSpace{ colorSpace_ }
{}
VULKAN_HPP_CONSTEXPR SurfaceFormatKHR( SurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SurfaceFormatKHR( VkSurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: SurfaceFormatKHR( *reinterpret_cast<SurfaceFormatKHR const *>( &rhs ) )
{}
SurfaceFormatKHR & operator=( SurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SurfaceFormatKHR & operator=( VkSurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR const *>( &rhs );
return *this;
}
operator VkSurfaceFormatKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSurfaceFormatKHR*>( this );
}
operator VkSurfaceFormatKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSurfaceFormatKHR*>( this );
}
operator VkSurfaceFormatKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSurfaceFormatKHR*>( this );
}
operator VkSurfaceFormatKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSurfaceFormatKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::ColorSpaceKHR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( format, colorSpace );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SurfaceFormatKHR const & ) const = default;
#else
bool operator==( SurfaceFormatKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( format == rhs.format )
&& ( colorSpace == rhs.colorSpace );
#endif
}
bool operator!=( SurfaceFormatKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
VULKAN_HPP_NAMESPACE::ColorSpaceKHR colorSpace = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear;
};
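// usage sketch (illustrative, not part of the generated header): choosing a swapchain format (a supported surface
// reports at least one entry); `physicalDevice` and `surface` are assumed:
//   std::vector<vk::SurfaceFormatKHR> formats = physicalDevice.getSurfaceFormatsKHR( surface );
//   vk::SurfaceFormatKHR chosen = formats.front();   // fallback: first advertised format
//   for ( auto const & f : formats )
//     if ( ( f.format == vk::Format::eB8G8R8A8Srgb ) && ( f.colorSpace == vk::ColorSpaceKHR::eSrgbNonlinear ) )
//       chosen = f;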
// wrapper struct for struct VkSurfaceFormat2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSurfaceFormat2KHR.html
struct SurfaceFormat2KHR
{
using NativeType = VkSurfaceFormat2KHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceFormat2KHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SurfaceFormat2KHR(VULKAN_HPP_NAMESPACE::SurfaceFormatKHR surfaceFormat_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, surfaceFormat{ surfaceFormat_ }
{}
VULKAN_HPP_CONSTEXPR SurfaceFormat2KHR( SurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SurfaceFormat2KHR( VkSurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
: SurfaceFormat2KHR( *reinterpret_cast<SurfaceFormat2KHR const *>( &rhs ) )
{}
SurfaceFormat2KHR & operator=( SurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SurfaceFormat2KHR & operator=( VkSurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR const *>( &rhs );
return *this;
}
operator VkSurfaceFormat2KHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSurfaceFormat2KHR*>( this );
}
operator VkSurfaceFormat2KHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSurfaceFormat2KHR*>( this );
}
operator VkSurfaceFormat2KHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSurfaceFormat2KHR*>( this );
}
operator VkSurfaceFormat2KHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSurfaceFormat2KHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::SurfaceFormatKHR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, surfaceFormat );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SurfaceFormat2KHR const & ) const = default;
#else
bool operator==( SurfaceFormat2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( surfaceFormat == rhs.surfaceFormat );
#endif
}
bool operator!=( SurfaceFormat2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceFormat2KHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::SurfaceFormatKHR surfaceFormat = {};
};
template <>
struct CppType<StructureType, StructureType::eSurfaceFormat2KHR>
{
using Type = SurfaceFormat2KHR;
};
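// usage sketch (illustrative, not part of the generated header): the extensible variant of the format query;
// `physicalDevice` and `surface` are assumed:
//   std::vector<vk::SurfaceFormat2KHR> formats2 =
//     physicalDevice.getSurfaceFormats2KHR( vk::PhysicalDeviceSurfaceInfo2KHR{ surface } );
//   vk::Format first = formats2.front().surfaceFormat.format;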
#if defined( VK_USE_PLATFORM_WIN32_KHR )
// wrapper struct for struct VkSurfaceFullScreenExclusiveInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSurfaceFullScreenExclusiveInfoEXT.html
struct SurfaceFullScreenExclusiveInfoEXT
{
using NativeType = VkSurfaceFullScreenExclusiveInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceFullScreenExclusiveInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveInfoEXT(VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive_ = VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT::eDefault, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, fullScreenExclusive{ fullScreenExclusive_ }
{}
VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveInfoEXT( SurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SurfaceFullScreenExclusiveInfoEXT( VkSurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: SurfaceFullScreenExclusiveInfoEXT( *reinterpret_cast<SurfaceFullScreenExclusiveInfoEXT const *>( &rhs ) )
{}
SurfaceFullScreenExclusiveInfoEXT & operator=( SurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SurfaceFullScreenExclusiveInfoEXT & operator=( VkSurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 SurfaceFullScreenExclusiveInfoEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SurfaceFullScreenExclusiveInfoEXT & setFullScreenExclusive( VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive_ ) VULKAN_HPP_NOEXCEPT
{
fullScreenExclusive = fullScreenExclusive_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkSurfaceFullScreenExclusiveInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSurfaceFullScreenExclusiveInfoEXT*>( this );
}
operator VkSurfaceFullScreenExclusiveInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSurfaceFullScreenExclusiveInfoEXT*>( this );
}
operator VkSurfaceFullScreenExclusiveInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSurfaceFullScreenExclusiveInfoEXT*>( this );
}
operator VkSurfaceFullScreenExclusiveInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSurfaceFullScreenExclusiveInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, fullScreenExclusive );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SurfaceFullScreenExclusiveInfoEXT const & ) const = default;
#else
bool operator==( SurfaceFullScreenExclusiveInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( fullScreenExclusive == rhs.fullScreenExclusive );
#endif
}
bool operator!=( SurfaceFullScreenExclusiveInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceFullScreenExclusiveInfoEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive = VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT::eDefault;
};
template <>
struct CppType<StructureType, StructureType::eSurfaceFullScreenExclusiveInfoEXT>
{
using Type = SurfaceFullScreenExclusiveInfoEXT;
};
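// usage sketch (illustrative, not part of the generated header): chained into vk::SwapchainCreateInfoKHR (or into
// vk::PhysicalDeviceSurfaceInfo2KHR for queries) to select the full-screen exclusive mode; `swapchainCreateInfo` is an
// assumed, otherwise filled-in create info with an empty pNext chain:
//   vk::SurfaceFullScreenExclusiveInfoEXT fullScreenInfo{ vk::FullScreenExclusiveEXT::eAllowed };
//   swapchainCreateInfo.setPNext( &fullScreenInfo );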
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#if defined( VK_USE_PLATFORM_WIN32_KHR )
// wrapper struct for struct VkSurfaceFullScreenExclusiveWin32InfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSurfaceFullScreenExclusiveWin32InfoEXT.html
struct SurfaceFullScreenExclusiveWin32InfoEXT
{
using NativeType = VkSurfaceFullScreenExclusiveWin32InfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveWin32InfoEXT(HMONITOR hmonitor_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, hmonitor{ hmonitor_ }
{}
VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveWin32InfoEXT( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SurfaceFullScreenExclusiveWin32InfoEXT( VkSurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: SurfaceFullScreenExclusiveWin32InfoEXT( *reinterpret_cast<SurfaceFullScreenExclusiveWin32InfoEXT const *>( &rhs ) )
{}
SurfaceFullScreenExclusiveWin32InfoEXT & operator=( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SurfaceFullScreenExclusiveWin32InfoEXT & operator=( VkSurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveWin32InfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 SurfaceFullScreenExclusiveWin32InfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SurfaceFullScreenExclusiveWin32InfoEXT & setHmonitor( HMONITOR hmonitor_ ) VULKAN_HPP_NOEXCEPT
{
hmonitor = hmonitor_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkSurfaceFullScreenExclusiveWin32InfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSurfaceFullScreenExclusiveWin32InfoEXT*>( this );
}
operator VkSurfaceFullScreenExclusiveWin32InfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSurfaceFullScreenExclusiveWin32InfoEXT*>( this );
}
operator VkSurfaceFullScreenExclusiveWin32InfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSurfaceFullScreenExclusiveWin32InfoEXT*>( this );
}
operator VkSurfaceFullScreenExclusiveWin32InfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSurfaceFullScreenExclusiveWin32InfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, HMONITOR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, hmonitor );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SurfaceFullScreenExclusiveWin32InfoEXT const & ) const = default;
#else
bool operator==( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( hmonitor == rhs.hmonitor );
#endif
}
bool operator!=( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT;
const void * pNext = {};
HMONITOR hmonitor = {};
};
template <>
struct CppType<StructureType, StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT>
{
using Type = SurfaceFullScreenExclusiveWin32InfoEXT;
};
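// usage sketch (illustrative, not part of the generated header): required in the same pNext chains as the struct above
// when the mode is eApplicationControlled; it names the target monitor. `hwnd` is an assumed HWND:
//   vk::SurfaceFullScreenExclusiveWin32InfoEXT win32Info{ MonitorFromWindow( hwnd, MONITOR_DEFAULTTOPRIMARY ) };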
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
// wrapper struct for struct VkSurfacePresentModeCompatibilityEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSurfacePresentModeCompatibilityEXT.html
struct SurfacePresentModeCompatibilityEXT
{
using NativeType = VkSurfacePresentModeCompatibilityEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfacePresentModeCompatibilityEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SurfacePresentModeCompatibilityEXT(uint32_t presentModeCount_ = {}, VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, presentModeCount{ presentModeCount_ }, pPresentModes{ pPresentModes_ }
{}
VULKAN_HPP_CONSTEXPR SurfacePresentModeCompatibilityEXT( SurfacePresentModeCompatibilityEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SurfacePresentModeCompatibilityEXT( VkSurfacePresentModeCompatibilityEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: SurfacePresentModeCompatibilityEXT( *reinterpret_cast<SurfacePresentModeCompatibilityEXT const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
SurfacePresentModeCompatibilityEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::PresentModeKHR> const & presentModes_, void * pNext_ = nullptr )
: pNext( pNext_ ), presentModeCount( static_cast<uint32_t>( presentModes_.size() ) ), pPresentModes( presentModes_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
SurfacePresentModeCompatibilityEXT & operator=( SurfacePresentModeCompatibilityEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SurfacePresentModeCompatibilityEXT & operator=( VkSurfacePresentModeCompatibilityEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfacePresentModeCompatibilityEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 SurfacePresentModeCompatibilityEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SurfacePresentModeCompatibilityEXT & setPresentModeCount( uint32_t presentModeCount_ ) VULKAN_HPP_NOEXCEPT
{
presentModeCount = presentModeCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SurfacePresentModeCompatibilityEXT & setPPresentModes( VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes_ ) VULKAN_HPP_NOEXCEPT
{
pPresentModes = pPresentModes_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
SurfacePresentModeCompatibilityEXT & setPresentModes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::PresentModeKHR> const & presentModes_ ) VULKAN_HPP_NOEXCEPT
{
presentModeCount = static_cast<uint32_t>( presentModes_.size() );
pPresentModes = presentModes_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkSurfacePresentModeCompatibilityEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSurfacePresentModeCompatibilityEXT*>( this );
}
operator VkSurfacePresentModeCompatibilityEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSurfacePresentModeCompatibilityEXT*>( this );
}
operator VkSurfacePresentModeCompatibilityEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSurfacePresentModeCompatibilityEXT*>( this );
}
operator VkSurfacePresentModeCompatibilityEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSurfacePresentModeCompatibilityEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::PresentModeKHR * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, presentModeCount, pPresentModes );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SurfacePresentModeCompatibilityEXT const & ) const = default;
#else
bool operator==( SurfacePresentModeCompatibilityEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( presentModeCount == rhs.presentModeCount )
&& ( pPresentModes == rhs.pPresentModes );
#endif
}
bool operator!=( SurfacePresentModeCompatibilityEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfacePresentModeCompatibilityEXT;
void * pNext = {};
uint32_t presentModeCount = {};
VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes = {};
};
template <>
struct CppType<StructureType, StructureType::eSurfacePresentModeCompatibilityEXT>
{
using Type = SurfacePresentModeCompatibilityEXT;
};
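// usage sketch (illustrative, not part of the generated header): under VK_EXT_surface_maintenance1 this struct chains
// to vk::SurfaceCapabilities2KHR, and the query must carry a vk::SurfacePresentModeEXT; the usual two-call idiom fills
// the array. `physicalDevice` and `surface` are assumed:
//   vk::SurfacePresentModeEXT queriedMode{ vk::PresentModeKHR::eFifo };
//   vk::PhysicalDeviceSurfaceInfo2KHR surfaceInfo{ surface, &queriedMode };
//   vk::SurfacePresentModeCompatibilityEXT compat{};
//   vk::SurfaceCapabilities2KHR caps2{};
//   caps2.pNext = &compat;
//   (void)physicalDevice.getSurfaceCapabilities2KHR( &surfaceInfo, &caps2 );  // first call: count only
//   std::vector<vk::PresentModeKHR> modes( compat.presentModeCount );
//   compat.pPresentModes = modes.data();
//   (void)physicalDevice.getSurfaceCapabilities2KHR( &surfaceInfo, &caps2 );  // second call: fill the mode array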
// wrapper struct for struct VkSurfacePresentModeEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSurfacePresentModeEXT.html
struct SurfacePresentModeEXT
{
using NativeType = VkSurfacePresentModeEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfacePresentModeEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SurfacePresentModeEXT(VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, presentMode{ presentMode_ }
{}
VULKAN_HPP_CONSTEXPR SurfacePresentModeEXT( SurfacePresentModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SurfacePresentModeEXT( VkSurfacePresentModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: SurfacePresentModeEXT( *reinterpret_cast<SurfacePresentModeEXT const *>( &rhs ) )
{}
SurfacePresentModeEXT & operator=( SurfacePresentModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SurfacePresentModeEXT & operator=( VkSurfacePresentModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfacePresentModeEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 SurfacePresentModeEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SurfacePresentModeEXT & setPresentMode( VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ ) VULKAN_HPP_NOEXCEPT
{
presentMode = presentMode_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkSurfacePresentModeEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSurfacePresentModeEXT*>( this );
}
operator VkSurfacePresentModeEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSurfacePresentModeEXT*>( this );
}
operator VkSurfacePresentModeEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSurfacePresentModeEXT*>( this );
}
operator VkSurfacePresentModeEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSurfacePresentModeEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PresentModeKHR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, presentMode );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SurfacePresentModeEXT const & ) const = default;
#else
bool operator==( SurfacePresentModeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( presentMode == rhs.presentMode );
#endif
}
bool operator!=( SurfacePresentModeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfacePresentModeEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate;
};
template <>
struct CppType<StructureType, StructureType::eSurfacePresentModeEXT>
{
using Type = SurfacePresentModeEXT;
};
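// Illustrative sketch (VK_EXT_surface_maintenance1): unlike the structure above,
// SurfacePresentModeEXT is an input structure; chain it into
// PhysicalDeviceSurfaceInfo2KHR to state which present mode a surface
// capabilities query should apply to. surface is assumed to be a valid handle.
//
//   vk::SurfacePresentModeEXT presentMode{ vk::PresentModeKHR::eMailbox };
//   vk::PhysicalDeviceSurfaceInfo2KHR surfaceInfo{ surface, &presentMode };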
// wrapper struct for struct VkSurfacePresentScalingCapabilitiesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSurfacePresentScalingCapabilitiesEXT.html
struct SurfacePresentScalingCapabilitiesEXT
{
using NativeType = VkSurfacePresentScalingCapabilitiesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfacePresentScalingCapabilitiesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SurfacePresentScalingCapabilitiesEXT( VULKAN_HPP_NAMESPACE::PresentScalingFlagsEXT supportedPresentScaling_ = {},
                                                           VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT supportedPresentGravityX_ = {},
                                                           VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT supportedPresentGravityY_ = {},
                                                           VULKAN_HPP_NAMESPACE::Extent2D minScaledImageExtent_ = {},
                                                           VULKAN_HPP_NAMESPACE::Extent2D maxScaledImageExtent_ = {},
                                                           void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
  : pNext{ pNext_ }
  , supportedPresentScaling{ supportedPresentScaling_ }
  , supportedPresentGravityX{ supportedPresentGravityX_ }
  , supportedPresentGravityY{ supportedPresentGravityY_ }
  , minScaledImageExtent{ minScaledImageExtent_ }
  , maxScaledImageExtent{ maxScaledImageExtent_ }
{}
VULKAN_HPP_CONSTEXPR SurfacePresentScalingCapabilitiesEXT( SurfacePresentScalingCapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SurfacePresentScalingCapabilitiesEXT( VkSurfacePresentScalingCapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: SurfacePresentScalingCapabilitiesEXT( *reinterpret_cast<SurfacePresentScalingCapabilitiesEXT const *>( &rhs ) )
{}
SurfacePresentScalingCapabilitiesEXT & operator=( SurfacePresentScalingCapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SurfacePresentScalingCapabilitiesEXT & operator=( VkSurfacePresentScalingCapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfacePresentScalingCapabilitiesEXT const *>( &rhs );
return *this;
}
operator VkSurfacePresentScalingCapabilitiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSurfacePresentScalingCapabilitiesEXT*>( this );
}
operator VkSurfacePresentScalingCapabilitiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSurfacePresentScalingCapabilitiesEXT*>( this );
}
operator VkSurfacePresentScalingCapabilitiesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSurfacePresentScalingCapabilitiesEXT*>( this );
}
operator VkSurfacePresentScalingCapabilitiesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSurfacePresentScalingCapabilitiesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PresentScalingFlagsEXT const &, VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT const &, VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, supportedPresentScaling, supportedPresentGravityX, supportedPresentGravityY, minScaledImageExtent, maxScaledImageExtent );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SurfacePresentScalingCapabilitiesEXT const & ) const = default;
#else
bool operator==( SurfacePresentScalingCapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( supportedPresentScaling == rhs.supportedPresentScaling )
&& ( supportedPresentGravityX == rhs.supportedPresentGravityX )
&& ( supportedPresentGravityY == rhs.supportedPresentGravityY )
&& ( minScaledImageExtent == rhs.minScaledImageExtent )
&& ( maxScaledImageExtent == rhs.maxScaledImageExtent );
#endif
}
bool operator!=( SurfacePresentScalingCapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfacePresentScalingCapabilitiesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::PresentScalingFlagsEXT supportedPresentScaling = {};
VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT supportedPresentGravityX = {};
VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT supportedPresentGravityY = {};
VULKAN_HPP_NAMESPACE::Extent2D minScaledImageExtent = {};
VULKAN_HPP_NAMESPACE::Extent2D maxScaledImageExtent = {};
};
template <>
struct CppType<StructureType, StructureType::eSurfacePresentScalingCapabilitiesEXT>
{
using Type = SurfacePresentScalingCapabilitiesEXT;
};
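// Illustrative sketch: a returned-only structure (hence no setters are
// generated). Chain it behind SurfaceCapabilities2KHR, together with a
// SurfacePresentModeEXT on the query side, to learn the scaling and gravity
// modes supported for that present mode. surfaceInfo as in the sketch above.
//
//   auto chain = physicalDevice.getSurfaceCapabilities2KHR<
//     vk::SurfaceCapabilities2KHR, vk::SurfacePresentScalingCapabilitiesEXT>( surfaceInfo );
//   auto scaling = chain.get<vk::SurfacePresentScalingCapabilitiesEXT>();
//   bool oneToOne = static_cast<bool>( scaling.supportedPresentScaling &
//                                      vk::PresentScalingFlagBitsEXT::eOneToOne );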
// wrapper struct for struct VkSurfaceProtectedCapabilitiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSurfaceProtectedCapabilitiesKHR.html
struct SurfaceProtectedCapabilitiesKHR
{
using NativeType = VkSurfaceProtectedCapabilitiesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceProtectedCapabilitiesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SurfaceProtectedCapabilitiesKHR(VULKAN_HPP_NAMESPACE::Bool32 supportsProtected_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, supportsProtected{ supportsProtected_ }
{}
VULKAN_HPP_CONSTEXPR SurfaceProtectedCapabilitiesKHR( SurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SurfaceProtectedCapabilitiesKHR( VkSurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: SurfaceProtectedCapabilitiesKHR( *reinterpret_cast<SurfaceProtectedCapabilitiesKHR const *>( &rhs ) )
{}
SurfaceProtectedCapabilitiesKHR & operator=( SurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SurfaceProtectedCapabilitiesKHR & operator=( VkSurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceProtectedCapabilitiesKHR const *>( &rhs );
return *this;
}
operator VkSurfaceProtectedCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSurfaceProtectedCapabilitiesKHR*>( this );
}
operator VkSurfaceProtectedCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSurfaceProtectedCapabilitiesKHR*>( this );
}
operator VkSurfaceProtectedCapabilitiesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSurfaceProtectedCapabilitiesKHR*>( this );
}
operator VkSurfaceProtectedCapabilitiesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSurfaceProtectedCapabilitiesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, supportsProtected );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SurfaceProtectedCapabilitiesKHR const & ) const = default;
#else
bool operator==( SurfaceProtectedCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( supportsProtected == rhs.supportsProtected );
#endif
}
bool operator!=( SurfaceProtectedCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceProtectedCapabilitiesKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 supportsProtected = {};
};
template <>
struct CppType<StructureType, StructureType::eSurfaceProtectedCapabilitiesKHR>
{
using Type = SurfaceProtectedCapabilitiesKHR;
};
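// Illustrative sketch (VK_KHR_surface_protected_capabilities): also
// returned-only; chained behind SurfaceCapabilities2KHR it reports whether this
// surface can back a swapchain created with
// vk::SwapchainCreateFlagBitsKHR::eProtected.
//
//   auto chain = physicalDevice.getSurfaceCapabilities2KHR<
//     vk::SurfaceCapabilities2KHR, vk::SurfaceProtectedCapabilitiesKHR>( surfaceInfo );
//   if ( chain.get<vk::SurfaceProtectedCapabilitiesKHR>().supportsProtected )
//   {
//     // protected swapchains are available on this surface
//   }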
// wrapper struct for struct VkSwapchainCounterCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSwapchainCounterCreateInfoEXT.html
struct SwapchainCounterCreateInfoEXT
{
using NativeType = VkSwapchainCounterCreateInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainCounterCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SwapchainCounterCreateInfoEXT(VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, surfaceCounters{ surfaceCounters_ }
{}
VULKAN_HPP_CONSTEXPR SwapchainCounterCreateInfoEXT( SwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SwapchainCounterCreateInfoEXT( VkSwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: SwapchainCounterCreateInfoEXT( *reinterpret_cast<SwapchainCounterCreateInfoEXT const *>( &rhs ) )
{}
SwapchainCounterCreateInfoEXT & operator=( SwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SwapchainCounterCreateInfoEXT & operator=( VkSwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SwapchainCounterCreateInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 SwapchainCounterCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SwapchainCounterCreateInfoEXT & setSurfaceCounters( VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters_ ) VULKAN_HPP_NOEXCEPT
{
surfaceCounters = surfaceCounters_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkSwapchainCounterCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSwapchainCounterCreateInfoEXT*>( this );
}
operator VkSwapchainCounterCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSwapchainCounterCreateInfoEXT*>( this );
}
operator VkSwapchainCounterCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSwapchainCounterCreateInfoEXT*>( this );
}
operator VkSwapchainCounterCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSwapchainCounterCreateInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, surfaceCounters );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SwapchainCounterCreateInfoEXT const & ) const = default;
#else
bool operator==( SwapchainCounterCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( surfaceCounters == rhs.surfaceCounters );
#endif
}
bool operator!=( SwapchainCounterCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainCounterCreateInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters = {};
};
template <>
struct CppType<StructureType, StructureType::eSwapchainCounterCreateInfoEXT>
{
using Type = SwapchainCounterCreateInfoEXT;
};
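// Illustrative sketch (VK_EXT_display_control): enable a vblank counter when
// creating the swapchain, then sample it later. createInfo is assumed to be a
// SwapchainCreateInfoKHR set up as usual, device a valid vk::Device.
//
//   vk::SwapchainCounterCreateInfoEXT counterInfo{ vk::SurfaceCounterFlagBitsEXT::eVblank };
//   createInfo.setPNext( &counterInfo );
//   vk::SwapchainKHR swapchain = device.createSwapchainKHR( createInfo );
//   uint64_t vblanks = device.getSwapchainCounterEXT( swapchain, vk::SurfaceCounterFlagBitsEXT::eVblank );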
// wrapper struct for struct VkSwapchainCreateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSwapchainCreateInfoKHR.html
struct SwapchainCreateInfoKHR
{
using NativeType = VkSwapchainCreateInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainCreateInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SwapchainCreateInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_ = {},
                                             VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ = {},
                                             uint32_t minImageCount_ = {},
                                             VULKAN_HPP_NAMESPACE::Format imageFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
                                             VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_ = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear,
                                             VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ = {},
                                             uint32_t imageArrayLayers_ = {},
                                             VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ = {},
                                             VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive,
                                             uint32_t queueFamilyIndexCount_ = {},
                                             const uint32_t * pQueueFamilyIndices_ = {},
                                             VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity,
                                             VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ = VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque,
                                             VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate,
                                             VULKAN_HPP_NAMESPACE::Bool32 clipped_ = {},
                                             VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ = {},
                                             const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
  : pNext{ pNext_ }
  , flags{ flags_ }
  , surface{ surface_ }
  , minImageCount{ minImageCount_ }
  , imageFormat{ imageFormat_ }
  , imageColorSpace{ imageColorSpace_ }
  , imageExtent{ imageExtent_ }
  , imageArrayLayers{ imageArrayLayers_ }
  , imageUsage{ imageUsage_ }
  , imageSharingMode{ imageSharingMode_ }
  , queueFamilyIndexCount{ queueFamilyIndexCount_ }
  , pQueueFamilyIndices{ pQueueFamilyIndices_ }
  , preTransform{ preTransform_ }
  , compositeAlpha{ compositeAlpha_ }
  , presentMode{ presentMode_ }
  , clipped{ clipped_ }
  , oldSwapchain{ oldSwapchain_ }
{}
VULKAN_HPP_CONSTEXPR SwapchainCreateInfoKHR( SwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SwapchainCreateInfoKHR( VkSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: SwapchainCreateInfoKHR( *reinterpret_cast<SwapchainCreateInfoKHR const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
SwapchainCreateInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_,
                        VULKAN_HPP_NAMESPACE::SurfaceKHR surface_,
                        uint32_t minImageCount_,
                        VULKAN_HPP_NAMESPACE::Format imageFormat_,
                        VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_,
                        VULKAN_HPP_NAMESPACE::Extent2D imageExtent_,
                        uint32_t imageArrayLayers_,
                        VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_,
                        VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_,
                        VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_,
                        VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity,
                        VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ = VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque,
                        VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate,
                        VULKAN_HPP_NAMESPACE::Bool32 clipped_ = {},
                        VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ = {},
                        const void * pNext_ = nullptr )
  : pNext( pNext_ )
  , flags( flags_ )
  , surface( surface_ )
  , minImageCount( minImageCount_ )
  , imageFormat( imageFormat_ )
  , imageColorSpace( imageColorSpace_ )
  , imageExtent( imageExtent_ )
  , imageArrayLayers( imageArrayLayers_ )
  , imageUsage( imageUsage_ )
  , imageSharingMode( imageSharingMode_ )
  , queueFamilyIndexCount( static_cast<uint32_t>( queueFamilyIndices_.size() ) )
  , pQueueFamilyIndices( queueFamilyIndices_.data() )
  , preTransform( preTransform_ )
  , compositeAlpha( compositeAlpha_ )
  , presentMode( presentMode_ )
  , clipped( clipped_ )
  , oldSwapchain( oldSwapchain_ )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
SwapchainCreateInfoKHR & operator=( SwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SwapchainCreateInfoKHR & operator=( VkSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setSurface( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ ) VULKAN_HPP_NOEXCEPT
{
surface = surface_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setMinImageCount( uint32_t minImageCount_ ) VULKAN_HPP_NOEXCEPT
{
minImageCount = minImageCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setImageFormat( VULKAN_HPP_NAMESPACE::Format imageFormat_ ) VULKAN_HPP_NOEXCEPT
{
imageFormat = imageFormat_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setImageColorSpace( VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_ ) VULKAN_HPP_NOEXCEPT
{
imageColorSpace = imageColorSpace_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setImageExtent( VULKAN_HPP_NAMESPACE::Extent2D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT
{
imageExtent = imageExtent_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setImageArrayLayers( uint32_t imageArrayLayers_ ) VULKAN_HPP_NOEXCEPT
{
imageArrayLayers = imageArrayLayers_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setImageUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ ) VULKAN_HPP_NOEXCEPT
{
imageUsage = imageUsage_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setImageSharingMode( VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_ ) VULKAN_HPP_NOEXCEPT
{
imageSharingMode = imageSharingMode_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT
{
queueFamilyIndexCount = queueFamilyIndexCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
{
pQueueFamilyIndices = pQueueFamilyIndices_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
SwapchainCreateInfoKHR & setQueueFamilyIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
{
queueFamilyIndexCount = static_cast<uint32_t>( queueFamilyIndices_.size() );
pQueueFamilyIndices = queueFamilyIndices_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setPreTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ ) VULKAN_HPP_NOEXCEPT
{
preTransform = preTransform_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setCompositeAlpha( VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ ) VULKAN_HPP_NOEXCEPT
{
compositeAlpha = compositeAlpha_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setPresentMode( VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ ) VULKAN_HPP_NOEXCEPT
{
presentMode = presentMode_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setClipped( VULKAN_HPP_NAMESPACE::Bool32 clipped_ ) VULKAN_HPP_NOEXCEPT
{
clipped = clipped_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setOldSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ ) VULKAN_HPP_NOEXCEPT
{
oldSwapchain = oldSwapchain_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkSwapchainCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSwapchainCreateInfoKHR*>( this );
}
operator VkSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSwapchainCreateInfoKHR*>( this );
}
operator VkSwapchainCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSwapchainCreateInfoKHR*>( this );
}
operator VkSwapchainCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSwapchainCreateInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
           const void * const &,
           VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR const &,
           VULKAN_HPP_NAMESPACE::SurfaceKHR const &,
           uint32_t const &,
           VULKAN_HPP_NAMESPACE::Format const &,
           VULKAN_HPP_NAMESPACE::ColorSpaceKHR const &,
           VULKAN_HPP_NAMESPACE::Extent2D const &,
           uint32_t const &,
           VULKAN_HPP_NAMESPACE::ImageUsageFlags const &,
           VULKAN_HPP_NAMESPACE::SharingMode const &,
           uint32_t const &,
           const uint32_t * const &,
           VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR const &,
           VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR const &,
           VULKAN_HPP_NAMESPACE::PresentModeKHR const &,
           VULKAN_HPP_NAMESPACE::Bool32 const &,
           VULKAN_HPP_NAMESPACE::SwapchainKHR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType,
                 pNext,
                 flags,
                 surface,
                 minImageCount,
                 imageFormat,
                 imageColorSpace,
                 imageExtent,
                 imageArrayLayers,
                 imageUsage,
                 imageSharingMode,
                 queueFamilyIndexCount,
                 pQueueFamilyIndices,
                 preTransform,
                 compositeAlpha,
                 presentMode,
                 clipped,
                 oldSwapchain );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SwapchainCreateInfoKHR const & ) const = default;
#else
bool operator==( SwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( surface == rhs.surface )
&& ( minImageCount == rhs.minImageCount )
&& ( imageFormat == rhs.imageFormat )
&& ( imageColorSpace == rhs.imageColorSpace )
&& ( imageExtent == rhs.imageExtent )
&& ( imageArrayLayers == rhs.imageArrayLayers )
&& ( imageUsage == rhs.imageUsage )
&& ( imageSharingMode == rhs.imageSharingMode )
&& ( queueFamilyIndexCount == rhs.queueFamilyIndexCount )
&& ( pQueueFamilyIndices == rhs.pQueueFamilyIndices )
&& ( preTransform == rhs.preTransform )
&& ( compositeAlpha == rhs.compositeAlpha )
&& ( presentMode == rhs.presentMode )
&& ( clipped == rhs.clipped )
&& ( oldSwapchain == rhs.oldSwapchain );
#endif
}
bool operator!=( SwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainCreateInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags = {};
VULKAN_HPP_NAMESPACE::SurfaceKHR surface = {};
uint32_t minImageCount = {};
VULKAN_HPP_NAMESPACE::Format imageFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear;
VULKAN_HPP_NAMESPACE::Extent2D imageExtent = {};
uint32_t imageArrayLayers = {};
VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage = {};
VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive;
uint32_t queueFamilyIndexCount = {};
const uint32_t * pQueueFamilyIndices = {};
VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha = VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque;
VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate;
VULKAN_HPP_NAMESPACE::Bool32 clipped = {};
VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain = {};
};
template <>
struct CppType<StructureType, StructureType::eSwapchainCreateInfoKHR>
{
using Type = SwapchainCreateInfoKHR;
};
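// Illustrative usage sketch, not part of the generated header: typical
// swapchain creation with the fluent setters. device, surface, and the
// format/extent choices are assumed to come from the surface queries above;
// error handling is omitted.
//
//   auto createInfo = vk::SwapchainCreateInfoKHR{}
//                       .setSurface( surface )
//                       .setMinImageCount( 3 )
//                       .setImageFormat( vk::Format::eB8G8R8A8Srgb )
//                       .setImageColorSpace( vk::ColorSpaceKHR::eSrgbNonlinear )
//                       .setImageExtent( { 1280, 720 } )
//                       .setImageArrayLayers( 1 )
//                       .setImageUsage( vk::ImageUsageFlagBits::eColorAttachment )
//                       .setPresentMode( vk::PresentModeKHR::eFifo )
//                       .setClipped( VK_TRUE );
//   vk::SwapchainKHR swapchain = device.createSwapchainKHR( createInfo );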
// wrapper struct for struct VkSwapchainDisplayNativeHdrCreateInfoAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSwapchainDisplayNativeHdrCreateInfoAMD.html
struct SwapchainDisplayNativeHdrCreateInfoAMD
{
using NativeType = VkSwapchainDisplayNativeHdrCreateInfoAMD;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SwapchainDisplayNativeHdrCreateInfoAMD(VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, localDimmingEnable{ localDimmingEnable_ }
{}
VULKAN_HPP_CONSTEXPR SwapchainDisplayNativeHdrCreateInfoAMD( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SwapchainDisplayNativeHdrCreateInfoAMD( VkSwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
: SwapchainDisplayNativeHdrCreateInfoAMD( *reinterpret_cast<SwapchainDisplayNativeHdrCreateInfoAMD const *>( &rhs ) )
{}
SwapchainDisplayNativeHdrCreateInfoAMD & operator=( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SwapchainDisplayNativeHdrCreateInfoAMD & operator=( VkSwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SwapchainDisplayNativeHdrCreateInfoAMD const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 SwapchainDisplayNativeHdrCreateInfoAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SwapchainDisplayNativeHdrCreateInfoAMD & setLocalDimmingEnable( VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable_ ) VULKAN_HPP_NOEXCEPT
{
localDimmingEnable = localDimmingEnable_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkSwapchainDisplayNativeHdrCreateInfoAMD const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSwapchainDisplayNativeHdrCreateInfoAMD*>( this );
}
operator VkSwapchainDisplayNativeHdrCreateInfoAMD &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSwapchainDisplayNativeHdrCreateInfoAMD*>( this );
}
operator VkSwapchainDisplayNativeHdrCreateInfoAMD const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSwapchainDisplayNativeHdrCreateInfoAMD*>( this );
}
operator VkSwapchainDisplayNativeHdrCreateInfoAMD *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSwapchainDisplayNativeHdrCreateInfoAMD*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, localDimmingEnable );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SwapchainDisplayNativeHdrCreateInfoAMD const & ) const = default;
#else
bool operator==( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( localDimmingEnable == rhs.localDimmingEnable );
#endif
}
bool operator!=( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable = {};
};
template <>
struct CppType<StructureType, StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD>
{
using Type = SwapchainDisplayNativeHdrCreateInfoAMD;
};
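// Illustrative sketch (VK_AMD_display_native_hdr): pick the initial
// local-dimming state at creation; it can be toggled later without recreating
// the swapchain.
//
//   vk::SwapchainDisplayNativeHdrCreateInfoAMD dimming{ VK_TRUE };
//   createInfo.setPNext( &dimming );
//   // later: device.setLocalDimmingAMD( swapchain, VK_FALSE );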
// wrapper struct for struct VkSwapchainLatencyCreateInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSwapchainLatencyCreateInfoNV.html
struct SwapchainLatencyCreateInfoNV
{
using NativeType = VkSwapchainLatencyCreateInfoNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainLatencyCreateInfoNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SwapchainLatencyCreateInfoNV(VULKAN_HPP_NAMESPACE::Bool32 latencyModeEnable_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, latencyModeEnable{ latencyModeEnable_ }
{}
VULKAN_HPP_CONSTEXPR SwapchainLatencyCreateInfoNV( SwapchainLatencyCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SwapchainLatencyCreateInfoNV( VkSwapchainLatencyCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: SwapchainLatencyCreateInfoNV( *reinterpret_cast<SwapchainLatencyCreateInfoNV const *>( &rhs ) )
{}
SwapchainLatencyCreateInfoNV & operator=( SwapchainLatencyCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SwapchainLatencyCreateInfoNV & operator=( VkSwapchainLatencyCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SwapchainLatencyCreateInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 SwapchainLatencyCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SwapchainLatencyCreateInfoNV & setLatencyModeEnable( VULKAN_HPP_NAMESPACE::Bool32 latencyModeEnable_ ) VULKAN_HPP_NOEXCEPT
{
latencyModeEnable = latencyModeEnable_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkSwapchainLatencyCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSwapchainLatencyCreateInfoNV*>( this );
}
operator VkSwapchainLatencyCreateInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSwapchainLatencyCreateInfoNV*>( this );
}
operator VkSwapchainLatencyCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSwapchainLatencyCreateInfoNV*>( this );
}
operator VkSwapchainLatencyCreateInfoNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSwapchainLatencyCreateInfoNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, latencyModeEnable );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SwapchainLatencyCreateInfoNV const & ) const = default;
#else
bool operator==( SwapchainLatencyCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( latencyModeEnable == rhs.latencyModeEnable );
#endif
}
bool operator!=( SwapchainLatencyCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainLatencyCreateInfoNV;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 latencyModeEnable = {};
};
template <>
struct CppType<StructureType, StructureType::eSwapchainLatencyCreateInfoNV>
{
using Type = SwapchainLatencyCreateInfoNV;
};
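// Illustrative sketch (VK_NV_low_latency2): opt the swapchain into latency
// tracking at creation time, preserving whatever is already chained.
//
//   vk::SwapchainLatencyCreateInfoNV latencyInfo{ VK_TRUE };
//   latencyInfo.setPNext( createInfo.pNext );  // keep any existing chain
//   createInfo.setPNext( &latencyInfo );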
// wrapper struct for struct VkSwapchainPresentBarrierCreateInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSwapchainPresentBarrierCreateInfoNV.html
struct SwapchainPresentBarrierCreateInfoNV
{
using NativeType = VkSwapchainPresentBarrierCreateInfoNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainPresentBarrierCreateInfoNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SwapchainPresentBarrierCreateInfoNV(VULKAN_HPP_NAMESPACE::Bool32 presentBarrierEnable_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, presentBarrierEnable{ presentBarrierEnable_ }
{}
VULKAN_HPP_CONSTEXPR SwapchainPresentBarrierCreateInfoNV( SwapchainPresentBarrierCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SwapchainPresentBarrierCreateInfoNV( VkSwapchainPresentBarrierCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: SwapchainPresentBarrierCreateInfoNV( *reinterpret_cast<SwapchainPresentBarrierCreateInfoNV const *>( &rhs ) )
{}
SwapchainPresentBarrierCreateInfoNV & operator=( SwapchainPresentBarrierCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SwapchainPresentBarrierCreateInfoNV & operator=( VkSwapchainPresentBarrierCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SwapchainPresentBarrierCreateInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 SwapchainPresentBarrierCreateInfoNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SwapchainPresentBarrierCreateInfoNV & setPresentBarrierEnable( VULKAN_HPP_NAMESPACE::Bool32 presentBarrierEnable_ ) VULKAN_HPP_NOEXCEPT
{
presentBarrierEnable = presentBarrierEnable_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkSwapchainPresentBarrierCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSwapchainPresentBarrierCreateInfoNV*>( this );
}
operator VkSwapchainPresentBarrierCreateInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSwapchainPresentBarrierCreateInfoNV*>( this );
}
operator VkSwapchainPresentBarrierCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSwapchainPresentBarrierCreateInfoNV*>( this );
}
operator VkSwapchainPresentBarrierCreateInfoNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSwapchainPresentBarrierCreateInfoNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, presentBarrierEnable );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SwapchainPresentBarrierCreateInfoNV const & ) const = default;
#else
bool operator==( SwapchainPresentBarrierCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( presentBarrierEnable == rhs.presentBarrierEnable );
#endif
}
bool operator!=( SwapchainPresentBarrierCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainPresentBarrierCreateInfoNV;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 presentBarrierEnable = {};
};
template <>
struct CppType<StructureType, StructureType::eSwapchainPresentBarrierCreateInfoNV>
{
using Type = SwapchainPresentBarrierCreateInfoNV;
};
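// Illustrative sketch (VK_NV_present_barrier): request present-barrier
// membership for the swapchain; support should first be confirmed via the
// corresponding surface capability query.
//
//   vk::SwapchainPresentBarrierCreateInfoNV barrierInfo{ VK_TRUE };
//   createInfo.setPNext( &barrierInfo );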
// wrapper struct for struct VkSwapchainPresentFenceInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSwapchainPresentFenceInfoEXT.html
struct SwapchainPresentFenceInfoEXT
{
using NativeType = VkSwapchainPresentFenceInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainPresentFenceInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SwapchainPresentFenceInfoEXT(uint32_t swapchainCount_ = {}, const VULKAN_HPP_NAMESPACE::Fence * pFences_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, swapchainCount{ swapchainCount_ }, pFences{ pFences_ }
{}
VULKAN_HPP_CONSTEXPR SwapchainPresentFenceInfoEXT( SwapchainPresentFenceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SwapchainPresentFenceInfoEXT( VkSwapchainPresentFenceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: SwapchainPresentFenceInfoEXT( *reinterpret_cast<SwapchainPresentFenceInfoEXT const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
SwapchainPresentFenceInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Fence> const & fences_, const void * pNext_ = nullptr )
: pNext( pNext_ ), swapchainCount( static_cast<uint32_t>( fences_.size() ) ), pFences( fences_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
SwapchainPresentFenceInfoEXT & operator=( SwapchainPresentFenceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SwapchainPresentFenceInfoEXT & operator=( VkSwapchainPresentFenceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SwapchainPresentFenceInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 SwapchainPresentFenceInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SwapchainPresentFenceInfoEXT & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT
{
swapchainCount = swapchainCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SwapchainPresentFenceInfoEXT & setPFences( const VULKAN_HPP_NAMESPACE::Fence * pFences_ ) VULKAN_HPP_NOEXCEPT
{
pFences = pFences_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
SwapchainPresentFenceInfoEXT & setFences( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Fence> const & fences_ ) VULKAN_HPP_NOEXCEPT
{
swapchainCount = static_cast<uint32_t>( fences_.size() );
pFences = fences_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkSwapchainPresentFenceInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSwapchainPresentFenceInfoEXT*>( this );
}
operator VkSwapchainPresentFenceInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSwapchainPresentFenceInfoEXT*>( this );
}
operator VkSwapchainPresentFenceInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSwapchainPresentFenceInfoEXT*>( this );
}
operator VkSwapchainPresentFenceInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSwapchainPresentFenceInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Fence * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, swapchainCount, pFences );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SwapchainPresentFenceInfoEXT const & ) const = default;
#else
bool operator==( SwapchainPresentFenceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( swapchainCount == rhs.swapchainCount )
&& ( pFences == rhs.pFences );
#endif
}
bool operator!=( SwapchainPresentFenceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainPresentFenceInfoEXT;
const void * pNext = {};
uint32_t swapchainCount = {};
const VULKAN_HPP_NAMESPACE::Fence * pFences = {};
};
template <>
struct CppType<StructureType, StructureType::eSwapchainPresentFenceInfoEXT>
{
using Type = SwapchainPresentFenceInfoEXT;
};
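// Illustrative sketch (VK_EXT_swapchain_maintenance1): one fence per presented
// swapchain, chained into PresentInfoKHR; each fence signals once the resources
// of that present operation may be freed. presentFence, presentInfo and queue
// are assumed to exist.
//
//   std::array<vk::Fence, 1> fences{ presentFence };
//   vk::SwapchainPresentFenceInfoEXT fenceInfo{ fences };  // enhanced-mode ArrayProxy ctor
//   presentInfo.setPNext( &fenceInfo );
//   auto result = queue.presentKHR( presentInfo );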
// wrapper struct for struct VkSwapchainPresentModeInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSwapchainPresentModeInfoEXT.html
struct SwapchainPresentModeInfoEXT
{
using NativeType = VkSwapchainPresentModeInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainPresentModeInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SwapchainPresentModeInfoEXT(uint32_t swapchainCount_ = {}, const VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, swapchainCount{ swapchainCount_ }, pPresentModes{ pPresentModes_ }
{}
VULKAN_HPP_CONSTEXPR SwapchainPresentModeInfoEXT( SwapchainPresentModeInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SwapchainPresentModeInfoEXT( VkSwapchainPresentModeInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: SwapchainPresentModeInfoEXT( *reinterpret_cast<SwapchainPresentModeInfoEXT const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
SwapchainPresentModeInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PresentModeKHR> const & presentModes_, const void * pNext_ = nullptr )
: pNext( pNext_ ), swapchainCount( static_cast<uint32_t>( presentModes_.size() ) ), pPresentModes( presentModes_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
SwapchainPresentModeInfoEXT & operator=( SwapchainPresentModeInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SwapchainPresentModeInfoEXT & operator=( VkSwapchainPresentModeInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SwapchainPresentModeInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 SwapchainPresentModeInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SwapchainPresentModeInfoEXT & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT
{
swapchainCount = swapchainCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SwapchainPresentModeInfoEXT & setPPresentModes( const VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes_ ) VULKAN_HPP_NOEXCEPT
{
pPresentModes = pPresentModes_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
SwapchainPresentModeInfoEXT & setPresentModes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PresentModeKHR> const & presentModes_ ) VULKAN_HPP_NOEXCEPT
{
swapchainCount = static_cast<uint32_t>( presentModes_.size() );
pPresentModes = presentModes_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkSwapchainPresentModeInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSwapchainPresentModeInfoEXT*>( this );
}
operator VkSwapchainPresentModeInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSwapchainPresentModeInfoEXT*>( this );
}
operator VkSwapchainPresentModeInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSwapchainPresentModeInfoEXT*>( this );
}
operator VkSwapchainPresentModeInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSwapchainPresentModeInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::PresentModeKHR * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, swapchainCount, pPresentModes );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SwapchainPresentModeInfoEXT const & ) const = default;
#else
bool operator==( SwapchainPresentModeInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( swapchainCount == rhs.swapchainCount )
&& ( pPresentModes == rhs.pPresentModes );
#endif
}
bool operator!=( SwapchainPresentModeInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainPresentModeInfoEXT;
const void * pNext = {};
uint32_t swapchainCount = {};
const VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes = {};
};
template <>
struct CppType<StructureType, StructureType::eSwapchainPresentModeInfoEXT>
{
using Type = SwapchainPresentModeInfoEXT;
};
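// Illustrative sketch: switch a swapchain's present mode at present time; the
// mode must be one declared in SwapchainPresentModesCreateInfoEXT (below) when
// the swapchain was created, with one entry per presented swapchain.
//
//   vk::PresentModeKHR newMode = vk::PresentModeKHR::eImmediate;
//   vk::SwapchainPresentModeInfoEXT modeInfo{ 1, &newMode };
//   presentInfo.setPNext( &modeInfo );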
// wrapper struct for struct VkSwapchainPresentModesCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSwapchainPresentModesCreateInfoEXT.html
struct SwapchainPresentModesCreateInfoEXT
{
using NativeType = VkSwapchainPresentModesCreateInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainPresentModesCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SwapchainPresentModesCreateInfoEXT(uint32_t presentModeCount_ = {}, const VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, presentModeCount{ presentModeCount_ }, pPresentModes{ pPresentModes_ }
{}
VULKAN_HPP_CONSTEXPR SwapchainPresentModesCreateInfoEXT( SwapchainPresentModesCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SwapchainPresentModesCreateInfoEXT( VkSwapchainPresentModesCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: SwapchainPresentModesCreateInfoEXT( *reinterpret_cast<SwapchainPresentModesCreateInfoEXT const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
SwapchainPresentModesCreateInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PresentModeKHR> const & presentModes_, const void * pNext_ = nullptr )
: pNext( pNext_ ), presentModeCount( static_cast<uint32_t>( presentModes_.size() ) ), pPresentModes( presentModes_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
SwapchainPresentModesCreateInfoEXT & operator=( SwapchainPresentModesCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SwapchainPresentModesCreateInfoEXT & operator=( VkSwapchainPresentModesCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SwapchainPresentModesCreateInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 SwapchainPresentModesCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SwapchainPresentModesCreateInfoEXT & setPresentModeCount( uint32_t presentModeCount_ ) VULKAN_HPP_NOEXCEPT
{
presentModeCount = presentModeCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SwapchainPresentModesCreateInfoEXT & setPPresentModes( const VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes_ ) VULKAN_HPP_NOEXCEPT
{
pPresentModes = pPresentModes_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
SwapchainPresentModesCreateInfoEXT & setPresentModes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PresentModeKHR> const & presentModes_ ) VULKAN_HPP_NOEXCEPT
{
presentModeCount = static_cast<uint32_t>( presentModes_.size() );
pPresentModes = presentModes_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkSwapchainPresentModesCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSwapchainPresentModesCreateInfoEXT*>( this );
}
operator VkSwapchainPresentModesCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSwapchainPresentModesCreateInfoEXT*>( this );
}
operator VkSwapchainPresentModesCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSwapchainPresentModesCreateInfoEXT*>( this );
}
operator VkSwapchainPresentModesCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSwapchainPresentModesCreateInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::PresentModeKHR * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, presentModeCount, pPresentModes );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SwapchainPresentModesCreateInfoEXT const & ) const = default;
#else
bool operator==( SwapchainPresentModesCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( presentModeCount == rhs.presentModeCount )
&& ( pPresentModes == rhs.pPresentModes );
#endif
}
bool operator!=( SwapchainPresentModesCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainPresentModesCreateInfoEXT;
const void * pNext = {};
uint32_t presentModeCount = {};
const VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes = {};
};
template <>
struct CppType<StructureType, StructureType::eSwapchainPresentModesCreateInfoEXT>
{
using Type = SwapchainPresentModesCreateInfoEXT;
};
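// Illustrative sketch: declare, at creation time, the set of present modes the
// swapchain may later switch between via SwapchainPresentModeInfoEXT above.
// The listed modes should be mutually compatible according to
// SurfacePresentModeCompatibilityEXT.
//
//   std::array<vk::PresentModeKHR, 2> modes{ vk::PresentModeKHR::eFifo,
//                                            vk::PresentModeKHR::eImmediate };
//   vk::SwapchainPresentModesCreateInfoEXT modesInfo{ modes };
//   createInfo.setPNext( &modesInfo );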
// wrapper struct for struct VkSwapchainPresentScalingCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSwapchainPresentScalingCreateInfoEXT.html
struct SwapchainPresentScalingCreateInfoEXT
{
using NativeType = VkSwapchainPresentScalingCreateInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainPresentScalingCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SwapchainPresentScalingCreateInfoEXT(VULKAN_HPP_NAMESPACE::PresentScalingFlagsEXT scalingBehavior_ = {}, VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT presentGravityX_ = {}, VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT presentGravityY_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, scalingBehavior{ scalingBehavior_ }, presentGravityX{ presentGravityX_ }, presentGravityY{ presentGravityY_ }
{}
VULKAN_HPP_CONSTEXPR SwapchainPresentScalingCreateInfoEXT( SwapchainPresentScalingCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SwapchainPresentScalingCreateInfoEXT( VkSwapchainPresentScalingCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: SwapchainPresentScalingCreateInfoEXT( *reinterpret_cast<SwapchainPresentScalingCreateInfoEXT const *>( &rhs ) )
{}
SwapchainPresentScalingCreateInfoEXT & operator=( SwapchainPresentScalingCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
SwapchainPresentScalingCreateInfoEXT & operator=( VkSwapchainPresentScalingCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SwapchainPresentScalingCreateInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 SwapchainPresentScalingCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SwapchainPresentScalingCreateInfoEXT & setScalingBehavior( VULKAN_HPP_NAMESPACE::PresentScalingFlagsEXT scalingBehavior_ ) VULKAN_HPP_NOEXCEPT
{
scalingBehavior = scalingBehavior_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SwapchainPresentScalingCreateInfoEXT & setPresentGravityX( VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT presentGravityX_ ) VULKAN_HPP_NOEXCEPT
{
presentGravityX = presentGravityX_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SwapchainPresentScalingCreateInfoEXT & setPresentGravityY( VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT presentGravityY_ ) VULKAN_HPP_NOEXCEPT
{
presentGravityY = presentGravityY_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkSwapchainPresentScalingCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSwapchainPresentScalingCreateInfoEXT*>( this );
}
operator VkSwapchainPresentScalingCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSwapchainPresentScalingCreateInfoEXT*>( this );
}
operator VkSwapchainPresentScalingCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkSwapchainPresentScalingCreateInfoEXT*>( this );
}
operator VkSwapchainPresentScalingCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkSwapchainPresentScalingCreateInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PresentScalingFlagsEXT const &, VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT const &, VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, scalingBehavior, presentGravityX, presentGravityY );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SwapchainPresentScalingCreateInfoEXT const & ) const = default;
#else
bool operator==( SwapchainPresentScalingCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( scalingBehavior == rhs.scalingBehavior )
&& ( presentGravityX == rhs.presentGravityX )
&& ( presentGravityY == rhs.presentGravityY );
#endif
}
bool operator!=( SwapchainPresentScalingCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainPresentScalingCreateInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::PresentScalingFlagsEXT scalingBehavior = {};
VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT presentGravityX = {};
VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT presentGravityY = {};
};
template <>
struct CppType<StructureType, StructureType::eSwapchainPresentScalingCreateInfoEXT>
{
using Type = SwapchainPresentScalingCreateInfoEXT;
};
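  // Usage sketch (illustrative comment, not part of the generated interface):
  // SwapchainPresentScalingCreateInfoEXT extends VkSwapchainCreateInfoKHR via its
  // pNext chain, so a typical construction looks like the following; the
  // remaining swapchain parameters are placeholders.
  //
  //   VULKAN_HPP_NAMESPACE::SwapchainPresentScalingCreateInfoEXT scalingInfo{};
  //   scalingInfo.setScalingBehavior( VULKAN_HPP_NAMESPACE::PresentScalingFlagBitsEXT::eOneToOne );
  //   VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR swapchainInfo{};
  //   swapchainInfo.setPNext( &scalingInfo );  // chain the scaling behavior
  //   // ... fill in surface, format, extent, etc., then create the swapchain.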
// wrapper struct for struct VkTensorCaptureDescriptorDataInfoARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTensorCaptureDescriptorDataInfoARM.html
struct TensorCaptureDescriptorDataInfoARM
{
using NativeType = VkTensorCaptureDescriptorDataInfoARM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eTensorCaptureDescriptorDataInfoARM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR TensorCaptureDescriptorDataInfoARM(VULKAN_HPP_NAMESPACE::TensorARM tensor_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, tensor{ tensor_ }
{}
VULKAN_HPP_CONSTEXPR TensorCaptureDescriptorDataInfoARM( TensorCaptureDescriptorDataInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
TensorCaptureDescriptorDataInfoARM( VkTensorCaptureDescriptorDataInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
: TensorCaptureDescriptorDataInfoARM( *reinterpret_cast<TensorCaptureDescriptorDataInfoARM const *>( &rhs ) )
{}
TensorCaptureDescriptorDataInfoARM & operator=( TensorCaptureDescriptorDataInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
TensorCaptureDescriptorDataInfoARM & operator=( VkTensorCaptureDescriptorDataInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TensorCaptureDescriptorDataInfoARM const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 TensorCaptureDescriptorDataInfoARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 TensorCaptureDescriptorDataInfoARM & setTensor( VULKAN_HPP_NAMESPACE::TensorARM tensor_ ) VULKAN_HPP_NOEXCEPT
{
tensor = tensor_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkTensorCaptureDescriptorDataInfoARM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkTensorCaptureDescriptorDataInfoARM*>( this );
}
operator VkTensorCaptureDescriptorDataInfoARM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkTensorCaptureDescriptorDataInfoARM*>( this );
}
operator VkTensorCaptureDescriptorDataInfoARM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkTensorCaptureDescriptorDataInfoARM*>( this );
}
operator VkTensorCaptureDescriptorDataInfoARM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkTensorCaptureDescriptorDataInfoARM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::TensorARM const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, tensor );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( TensorCaptureDescriptorDataInfoARM const & ) const = default;
#else
bool operator==( TensorCaptureDescriptorDataInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( tensor == rhs.tensor );
#endif
}
bool operator!=( TensorCaptureDescriptorDataInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTensorCaptureDescriptorDataInfoARM;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::TensorARM tensor = {};
};
template <>
struct CppType<StructureType, StructureType::eTensorCaptureDescriptorDataInfoARM>
{
using Type = TensorCaptureDescriptorDataInfoARM;
};
// wrapper struct for struct VkTensorMemoryBarrierARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTensorMemoryBarrierARM.html
struct TensorMemoryBarrierARM
{
using NativeType = VkTensorMemoryBarrierARM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eTensorMemoryBarrierARM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR TensorMemoryBarrierARM(VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ = {}, uint32_t srcQueueFamilyIndex_ = {}, uint32_t dstQueueFamilyIndex_ = {}, VULKAN_HPP_NAMESPACE::TensorARM tensor_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, srcStageMask{ srcStageMask_ }, srcAccessMask{ srcAccessMask_ }, dstStageMask{ dstStageMask_ }, dstAccessMask{ dstAccessMask_ }, srcQueueFamilyIndex{ srcQueueFamilyIndex_ }, dstQueueFamilyIndex{ dstQueueFamilyIndex_ }, tensor{ tensor_ }
{}
VULKAN_HPP_CONSTEXPR TensorMemoryBarrierARM( TensorMemoryBarrierARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
TensorMemoryBarrierARM( VkTensorMemoryBarrierARM const & rhs ) VULKAN_HPP_NOEXCEPT
: TensorMemoryBarrierARM( *reinterpret_cast<TensorMemoryBarrierARM const *>( &rhs ) )
{}
TensorMemoryBarrierARM & operator=( TensorMemoryBarrierARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
TensorMemoryBarrierARM & operator=( VkTensorMemoryBarrierARM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TensorMemoryBarrierARM const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 TensorMemoryBarrierARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 TensorMemoryBarrierARM & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ ) VULKAN_HPP_NOEXCEPT
{
srcStageMask = srcStageMask_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 TensorMemoryBarrierARM & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
{
srcAccessMask = srcAccessMask_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 TensorMemoryBarrierARM & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ ) VULKAN_HPP_NOEXCEPT
{
dstStageMask = dstStageMask_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 TensorMemoryBarrierARM & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
{
dstAccessMask = dstAccessMask_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 TensorMemoryBarrierARM & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
{
srcQueueFamilyIndex = srcQueueFamilyIndex_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 TensorMemoryBarrierARM & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
{
dstQueueFamilyIndex = dstQueueFamilyIndex_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 TensorMemoryBarrierARM & setTensor( VULKAN_HPP_NAMESPACE::TensorARM tensor_ ) VULKAN_HPP_NOEXCEPT
{
tensor = tensor_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkTensorMemoryBarrierARM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkTensorMemoryBarrierARM*>( this );
}
operator VkTensorMemoryBarrierARM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkTensorMemoryBarrierARM*>( this );
}
operator VkTensorMemoryBarrierARM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkTensorMemoryBarrierARM*>( this );
}
operator VkTensorMemoryBarrierARM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkTensorMemoryBarrierARM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 const &, VULKAN_HPP_NAMESPACE::AccessFlags2 const &, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 const &, VULKAN_HPP_NAMESPACE::AccessFlags2 const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::TensorARM const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, srcStageMask, srcAccessMask, dstStageMask, dstAccessMask, srcQueueFamilyIndex, dstQueueFamilyIndex, tensor );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( TensorMemoryBarrierARM const & ) const = default;
#else
bool operator==( TensorMemoryBarrierARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( srcStageMask == rhs.srcStageMask )
&& ( srcAccessMask == rhs.srcAccessMask )
&& ( dstStageMask == rhs.dstStageMask )
&& ( dstAccessMask == rhs.dstAccessMask )
&& ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex )
&& ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex )
&& ( tensor == rhs.tensor );
#endif
}
bool operator!=( TensorMemoryBarrierARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTensorMemoryBarrierARM;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask = {};
VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask = {};
VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask = {};
VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask = {};
uint32_t srcQueueFamilyIndex = {};
uint32_t dstQueueFamilyIndex = {};
VULKAN_HPP_NAMESPACE::TensorARM tensor = {};
};
template <>
struct CppType<StructureType, StructureType::eTensorMemoryBarrierARM>
{
using Type = TensorMemoryBarrierARM;
};
// wrapper struct for struct VkTensorDependencyInfoARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTensorDependencyInfoARM.html
struct TensorDependencyInfoARM
{
using NativeType = VkTensorDependencyInfoARM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eTensorDependencyInfoARM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR TensorDependencyInfoARM(uint32_t tensorMemoryBarrierCount_ = {}, const VULKAN_HPP_NAMESPACE::TensorMemoryBarrierARM * pTensorMemoryBarriers_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, tensorMemoryBarrierCount{ tensorMemoryBarrierCount_ }, pTensorMemoryBarriers{ pTensorMemoryBarriers_ }
{}
VULKAN_HPP_CONSTEXPR TensorDependencyInfoARM( TensorDependencyInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
TensorDependencyInfoARM( VkTensorDependencyInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
: TensorDependencyInfoARM( *reinterpret_cast<TensorDependencyInfoARM const *>( &rhs ) )
{}
TensorDependencyInfoARM & operator=( TensorDependencyInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
TensorDependencyInfoARM & operator=( VkTensorDependencyInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TensorDependencyInfoARM const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 TensorDependencyInfoARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 TensorDependencyInfoARM & setTensorMemoryBarrierCount( uint32_t tensorMemoryBarrierCount_ ) VULKAN_HPP_NOEXCEPT
{
tensorMemoryBarrierCount = tensorMemoryBarrierCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 TensorDependencyInfoARM & setPTensorMemoryBarriers( const VULKAN_HPP_NAMESPACE::TensorMemoryBarrierARM * pTensorMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT
{
pTensorMemoryBarriers = pTensorMemoryBarriers_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkTensorDependencyInfoARM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkTensorDependencyInfoARM*>( this );
}
operator VkTensorDependencyInfoARM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkTensorDependencyInfoARM*>( this );
}
operator VkTensorDependencyInfoARM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkTensorDependencyInfoARM*>( this );
}
operator VkTensorDependencyInfoARM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkTensorDependencyInfoARM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::TensorMemoryBarrierARM * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, tensorMemoryBarrierCount, pTensorMemoryBarriers );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( TensorDependencyInfoARM const & ) const = default;
#else
bool operator==( TensorDependencyInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( tensorMemoryBarrierCount == rhs.tensorMemoryBarrierCount )
&& ( pTensorMemoryBarriers == rhs.pTensorMemoryBarriers );
#endif
}
bool operator!=( TensorDependencyInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTensorDependencyInfoARM;
const void * pNext = {};
uint32_t tensorMemoryBarrierCount = {};
const VULKAN_HPP_NAMESPACE::TensorMemoryBarrierARM * pTensorMemoryBarriers = {};
};
template <>
struct CppType<StructureType, StructureType::eTensorDependencyInfoARM>
{
using Type = TensorDependencyInfoARM;
};
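  // Usage sketch (illustrative comment; assumes the ARM tensor extension is
  // enabled and `tensor` is a valid VULKAN_HPP_NAMESPACE::TensorARM handle):
  // TensorMemoryBarrierARM follows the synchronization2 src/dst mask pattern,
  // and one or more barriers are grouped by a TensorDependencyInfoARM, which is
  // designed to be chained into a DependencyInfo when recording the barrier.
  //
  //   VULKAN_HPP_NAMESPACE::TensorMemoryBarrierARM barrier{};
  //   barrier.setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits2::eComputeShader )
  //          .setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlagBits2::eShaderWrite )
  //          .setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits2::eComputeShader )
  //          .setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlagBits2::eShaderRead )
  //          .setSrcQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
  //          .setDstQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
  //          .setTensor( tensor );
  //   VULKAN_HPP_NAMESPACE::TensorDependencyInfoARM dependency{};
  //   dependency.setTensorMemoryBarrierCount( 1 ).setPTensorMemoryBarriers( &barrier );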
// wrapper struct for struct VkTensorFormatPropertiesARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTensorFormatPropertiesARM.html
struct TensorFormatPropertiesARM
{
using NativeType = VkTensorFormatPropertiesARM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eTensorFormatPropertiesARM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR TensorFormatPropertiesARM(VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 optimalTilingTensorFeatures_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 linearTilingTensorFeatures_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, optimalTilingTensorFeatures{ optimalTilingTensorFeatures_ }, linearTilingTensorFeatures{ linearTilingTensorFeatures_ }
{}
VULKAN_HPP_CONSTEXPR TensorFormatPropertiesARM( TensorFormatPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
TensorFormatPropertiesARM( VkTensorFormatPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT
: TensorFormatPropertiesARM( *reinterpret_cast<TensorFormatPropertiesARM const *>( &rhs ) )
{}
TensorFormatPropertiesARM & operator=( TensorFormatPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
TensorFormatPropertiesARM & operator=( VkTensorFormatPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TensorFormatPropertiesARM const *>( &rhs );
return *this;
}
operator VkTensorFormatPropertiesARM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkTensorFormatPropertiesARM*>( this );
}
operator VkTensorFormatPropertiesARM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkTensorFormatPropertiesARM*>( this );
}
operator VkTensorFormatPropertiesARM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkTensorFormatPropertiesARM*>( this );
}
operator VkTensorFormatPropertiesARM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkTensorFormatPropertiesARM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 const &, VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, optimalTilingTensorFeatures, linearTilingTensorFeatures );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( TensorFormatPropertiesARM const & ) const = default;
#else
bool operator==( TensorFormatPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( optimalTilingTensorFeatures == rhs.optimalTilingTensorFeatures )
&& ( linearTilingTensorFeatures == rhs.linearTilingTensorFeatures );
#endif
}
bool operator!=( TensorFormatPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTensorFormatPropertiesARM;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 optimalTilingTensorFeatures = {};
VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 linearTilingTensorFeatures = {};
};
template <>
struct CppType<StructureType, StructureType::eTensorFormatPropertiesARM>
{
using Type = TensorFormatPropertiesARM;
};
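  // Query sketch (illustrative comment): TensorFormatPropertiesARM is an output
  // structure, which is why it has no set*() members; it is filled in by
  // chaining it into a FormatProperties2 query. Assumes `physicalDevice` is a
  // valid VULKAN_HPP_NAMESPACE::PhysicalDevice and enhanced mode is available.
  //
  //   auto chain = physicalDevice.getFormatProperties2<
  //     VULKAN_HPP_NAMESPACE::FormatProperties2,
  //     VULKAN_HPP_NAMESPACE::TensorFormatPropertiesARM>( VULKAN_HPP_NAMESPACE::Format::eR8Sint );
  //   auto tensorProps = chain.get<VULKAN_HPP_NAMESPACE::TensorFormatPropertiesARM>();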
// wrapper struct for struct VkTensorMemoryRequirementsInfoARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTensorMemoryRequirementsInfoARM.html
struct TensorMemoryRequirementsInfoARM
{
using NativeType = VkTensorMemoryRequirementsInfoARM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eTensorMemoryRequirementsInfoARM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR TensorMemoryRequirementsInfoARM(VULKAN_HPP_NAMESPACE::TensorARM tensor_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, tensor{ tensor_ }
{}
VULKAN_HPP_CONSTEXPR TensorMemoryRequirementsInfoARM( TensorMemoryRequirementsInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
TensorMemoryRequirementsInfoARM( VkTensorMemoryRequirementsInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
: TensorMemoryRequirementsInfoARM( *reinterpret_cast<TensorMemoryRequirementsInfoARM const *>( &rhs ) )
{}
TensorMemoryRequirementsInfoARM & operator=( TensorMemoryRequirementsInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
TensorMemoryRequirementsInfoARM & operator=( VkTensorMemoryRequirementsInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TensorMemoryRequirementsInfoARM const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 TensorMemoryRequirementsInfoARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 TensorMemoryRequirementsInfoARM & setTensor( VULKAN_HPP_NAMESPACE::TensorARM tensor_ ) VULKAN_HPP_NOEXCEPT
{
tensor = tensor_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkTensorMemoryRequirementsInfoARM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkTensorMemoryRequirementsInfoARM*>( this );
}
operator VkTensorMemoryRequirementsInfoARM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkTensorMemoryRequirementsInfoARM*>( this );
}
operator VkTensorMemoryRequirementsInfoARM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkTensorMemoryRequirementsInfoARM*>( this );
}
operator VkTensorMemoryRequirementsInfoARM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkTensorMemoryRequirementsInfoARM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::TensorARM const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, tensor );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( TensorMemoryRequirementsInfoARM const & ) const = default;
#else
bool operator==( TensorMemoryRequirementsInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( tensor == rhs.tensor );
#endif
}
bool operator!=( TensorMemoryRequirementsInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTensorMemoryRequirementsInfoARM;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::TensorARM tensor = {};
};
template <>
struct CppType<StructureType, StructureType::eTensorMemoryRequirementsInfoARM>
{
using Type = TensorMemoryRequirementsInfoARM;
};
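  // Interop sketch (illustrative comment): like every wrapper in this header,
  // TensorMemoryRequirementsInfoARM is layout-compatible with its NativeType and
  // converts implicitly, so it can be handed directly to a C-style consumer.
  // `queryTensorRequirements` below is a hypothetical function used only to
  // demonstrate the conversion; it is not part of Vulkan.
  //
  //   void queryTensorRequirements( const VkTensorMemoryRequirementsInfoARM * );  // hypothetical
  //   VULKAN_HPP_NAMESPACE::TensorMemoryRequirementsInfoARM info( tensor );
  //   queryTensorRequirements( info );  // uses operator VkTensorMemoryRequirementsInfoARM const *()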
// wrapper struct for struct VkTensorViewCaptureDescriptorDataInfoARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTensorViewCaptureDescriptorDataInfoARM.html
struct TensorViewCaptureDescriptorDataInfoARM
{
using NativeType = VkTensorViewCaptureDescriptorDataInfoARM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eTensorViewCaptureDescriptorDataInfoARM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR TensorViewCaptureDescriptorDataInfoARM(VULKAN_HPP_NAMESPACE::TensorViewARM tensorView_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, tensorView{ tensorView_ }
{}
VULKAN_HPP_CONSTEXPR TensorViewCaptureDescriptorDataInfoARM( TensorViewCaptureDescriptorDataInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
TensorViewCaptureDescriptorDataInfoARM( VkTensorViewCaptureDescriptorDataInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
: TensorViewCaptureDescriptorDataInfoARM( *reinterpret_cast<TensorViewCaptureDescriptorDataInfoARM const *>( &rhs ) )
{}
TensorViewCaptureDescriptorDataInfoARM & operator=( TensorViewCaptureDescriptorDataInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
TensorViewCaptureDescriptorDataInfoARM & operator=( VkTensorViewCaptureDescriptorDataInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TensorViewCaptureDescriptorDataInfoARM const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 TensorViewCaptureDescriptorDataInfoARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 TensorViewCaptureDescriptorDataInfoARM & setTensorView( VULKAN_HPP_NAMESPACE::TensorViewARM tensorView_ ) VULKAN_HPP_NOEXCEPT
{
tensorView = tensorView_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkTensorViewCaptureDescriptorDataInfoARM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkTensorViewCaptureDescriptorDataInfoARM*>( this );
}
operator VkTensorViewCaptureDescriptorDataInfoARM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkTensorViewCaptureDescriptorDataInfoARM*>( this );
}
operator VkTensorViewCaptureDescriptorDataInfoARM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkTensorViewCaptureDescriptorDataInfoARM*>( this );
}
operator VkTensorViewCaptureDescriptorDataInfoARM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkTensorViewCaptureDescriptorDataInfoARM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::TensorViewARM const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, tensorView );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( TensorViewCaptureDescriptorDataInfoARM const & ) const = default;
#else
bool operator==( TensorViewCaptureDescriptorDataInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( tensorView == rhs.tensorView );
#endif
}
bool operator!=( TensorViewCaptureDescriptorDataInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTensorViewCaptureDescriptorDataInfoARM;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::TensorViewARM tensorView = {};
};
template <>
struct CppType<StructureType, StructureType::eTensorViewCaptureDescriptorDataInfoARM>
{
using Type = TensorViewCaptureDescriptorDataInfoARM;
};
// wrapper struct for struct VkTensorViewCreateInfoARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTensorViewCreateInfoARM.html
struct TensorViewCreateInfoARM
{
using NativeType = VkTensorViewCreateInfoARM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eTensorViewCreateInfoARM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR TensorViewCreateInfoARM(VULKAN_HPP_NAMESPACE::TensorViewCreateFlagsARM flags_ = {}, VULKAN_HPP_NAMESPACE::TensorARM tensor_ = {}, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, tensor{ tensor_ }, format{ format_ }
{}
VULKAN_HPP_CONSTEXPR TensorViewCreateInfoARM( TensorViewCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
TensorViewCreateInfoARM( VkTensorViewCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
: TensorViewCreateInfoARM( *reinterpret_cast<TensorViewCreateInfoARM const *>( &rhs ) )
{}
TensorViewCreateInfoARM & operator=( TensorViewCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
TensorViewCreateInfoARM & operator=( VkTensorViewCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TensorViewCreateInfoARM const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 TensorViewCreateInfoARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 TensorViewCreateInfoARM & setFlags( VULKAN_HPP_NAMESPACE::TensorViewCreateFlagsARM flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 TensorViewCreateInfoARM & setTensor( VULKAN_HPP_NAMESPACE::TensorARM tensor_ ) VULKAN_HPP_NOEXCEPT
{
tensor = tensor_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 TensorViewCreateInfoARM & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
{
format = format_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkTensorViewCreateInfoARM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkTensorViewCreateInfoARM*>( this );
}
operator VkTensorViewCreateInfoARM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkTensorViewCreateInfoARM*>( this );
}
operator VkTensorViewCreateInfoARM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkTensorViewCreateInfoARM*>( this );
}
operator VkTensorViewCreateInfoARM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkTensorViewCreateInfoARM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::TensorViewCreateFlagsARM const &, VULKAN_HPP_NAMESPACE::TensorARM const &, VULKAN_HPP_NAMESPACE::Format const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, tensor, format );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( TensorViewCreateInfoARM const & ) const = default;
#else
bool operator==( TensorViewCreateInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( tensor == rhs.tensor )
&& ( format == rhs.format );
#endif
}
bool operator!=( TensorViewCreateInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTensorViewCreateInfoARM;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::TensorViewCreateFlagsARM flags = {};
VULKAN_HPP_NAMESPACE::TensorARM tensor = {};
VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
};
template <>
struct CppType<StructureType, StructureType::eTensorViewCreateInfoARM>
{
using Type = TensorViewCreateInfoARM;
};
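  // Construction sketch (illustrative comment; `tensor` is assumed to be a valid
  // TensorARM handle): the chained setters and the constructor are equivalent
  // ways to populate a TensorViewCreateInfoARM before creating a tensor view
  // through the ARM tensor extension's entry points.
  //
  //   VULKAN_HPP_NAMESPACE::TensorViewCreateInfoARM viewInfo{};
  //   viewInfo.setTensor( tensor ).setFormat( VULKAN_HPP_NAMESPACE::Format::eR8Sint );
  //   // or, equivalently, via the constructor:
  //   VULKAN_HPP_NAMESPACE::TensorViewCreateInfoARM viewInfo2( {}, tensor, VULKAN_HPP_NAMESPACE::Format::eR8Sint );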
// wrapper struct for struct VkTextureLODGatherFormatPropertiesAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTextureLODGatherFormatPropertiesAMD.html
struct TextureLODGatherFormatPropertiesAMD
{
using NativeType = VkTextureLODGatherFormatPropertiesAMD;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eTextureLodGatherFormatPropertiesAMD;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR TextureLODGatherFormatPropertiesAMD(VULKAN_HPP_NAMESPACE::Bool32 supportsTextureGatherLODBiasAMD_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, supportsTextureGatherLODBiasAMD{ supportsTextureGatherLODBiasAMD_ }
{}
VULKAN_HPP_CONSTEXPR TextureLODGatherFormatPropertiesAMD( TextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
TextureLODGatherFormatPropertiesAMD( VkTextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
: TextureLODGatherFormatPropertiesAMD( *reinterpret_cast<TextureLODGatherFormatPropertiesAMD const *>( &rhs ) )
{}
TextureLODGatherFormatPropertiesAMD & operator=( TextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
TextureLODGatherFormatPropertiesAMD & operator=( VkTextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD const *>( &rhs );
return *this;
}
operator VkTextureLODGatherFormatPropertiesAMD const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkTextureLODGatherFormatPropertiesAMD*>( this );
}
operator VkTextureLODGatherFormatPropertiesAMD &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkTextureLODGatherFormatPropertiesAMD*>( this );
}
operator VkTextureLODGatherFormatPropertiesAMD const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkTextureLODGatherFormatPropertiesAMD*>( this );
}
operator VkTextureLODGatherFormatPropertiesAMD *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkTextureLODGatherFormatPropertiesAMD*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, supportsTextureGatherLODBiasAMD );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( TextureLODGatherFormatPropertiesAMD const & ) const = default;
#else
bool operator==( TextureLODGatherFormatPropertiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( supportsTextureGatherLODBiasAMD == rhs.supportsTextureGatherLODBiasAMD );
#endif
}
bool operator!=( TextureLODGatherFormatPropertiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTextureLodGatherFormatPropertiesAMD;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 supportsTextureGatherLODBiasAMD = {};
};
template <>
struct CppType<StructureType, StructureType::eTextureLodGatherFormatPropertiesAMD>
{
using Type = TextureLODGatherFormatPropertiesAMD;
};
// wrapper struct for struct VkTileMemoryBindInfoQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTileMemoryBindInfoQCOM.html
struct TileMemoryBindInfoQCOM
{
using NativeType = VkTileMemoryBindInfoQCOM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eTileMemoryBindInfoQCOM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR TileMemoryBindInfoQCOM(VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, memory{ memory_ }
{}
VULKAN_HPP_CONSTEXPR TileMemoryBindInfoQCOM( TileMemoryBindInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
TileMemoryBindInfoQCOM( VkTileMemoryBindInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
: TileMemoryBindInfoQCOM( *reinterpret_cast<TileMemoryBindInfoQCOM const *>( &rhs ) )
{}
TileMemoryBindInfoQCOM & operator=( TileMemoryBindInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
TileMemoryBindInfoQCOM & operator=( VkTileMemoryBindInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TileMemoryBindInfoQCOM const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 TileMemoryBindInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 TileMemoryBindInfoQCOM & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
{
memory = memory_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkTileMemoryBindInfoQCOM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkTileMemoryBindInfoQCOM*>( this );
}
operator VkTileMemoryBindInfoQCOM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkTileMemoryBindInfoQCOM*>( this );
}
operator VkTileMemoryBindInfoQCOM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkTileMemoryBindInfoQCOM*>( this );
}
operator VkTileMemoryBindInfoQCOM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkTileMemoryBindInfoQCOM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceMemory const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, memory );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( TileMemoryBindInfoQCOM const & ) const = default;
#else
bool operator==( TileMemoryBindInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( memory == rhs.memory );
#endif
}
bool operator!=( TileMemoryBindInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTileMemoryBindInfoQCOM;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
};
template <>
struct CppType<StructureType, StructureType::eTileMemoryBindInfoQCOM>
{
using Type = TileMemoryBindInfoQCOM;
};
// wrapper struct for struct VkTileMemoryRequirementsQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTileMemoryRequirementsQCOM.html
struct TileMemoryRequirementsQCOM
{
using NativeType = VkTileMemoryRequirementsQCOM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eTileMemoryRequirementsQCOM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR TileMemoryRequirementsQCOM(VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize alignment_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, size{ size_ }, alignment{ alignment_ }
{}
VULKAN_HPP_CONSTEXPR TileMemoryRequirementsQCOM( TileMemoryRequirementsQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
TileMemoryRequirementsQCOM( VkTileMemoryRequirementsQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
: TileMemoryRequirementsQCOM( *reinterpret_cast<TileMemoryRequirementsQCOM const *>( &rhs ) )
{}
TileMemoryRequirementsQCOM & operator=( TileMemoryRequirementsQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
TileMemoryRequirementsQCOM & operator=( VkTileMemoryRequirementsQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TileMemoryRequirementsQCOM const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 TileMemoryRequirementsQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 TileMemoryRequirementsQCOM & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
{
size = size_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 TileMemoryRequirementsQCOM & setAlignment( VULKAN_HPP_NAMESPACE::DeviceSize alignment_ ) VULKAN_HPP_NOEXCEPT
{
alignment = alignment_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkTileMemoryRequirementsQCOM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkTileMemoryRequirementsQCOM*>( this );
}
operator VkTileMemoryRequirementsQCOM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkTileMemoryRequirementsQCOM*>( this );
}
operator VkTileMemoryRequirementsQCOM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkTileMemoryRequirementsQCOM*>( this );
}
operator VkTileMemoryRequirementsQCOM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkTileMemoryRequirementsQCOM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, size, alignment );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( TileMemoryRequirementsQCOM const & ) const = default;
#else
bool operator==( TileMemoryRequirementsQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( size == rhs.size )
&& ( alignment == rhs.alignment );
#endif
}
bool operator!=( TileMemoryRequirementsQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTileMemoryRequirementsQCOM;
void * pNext = {};
VULKAN_HPP_NAMESPACE::DeviceSize size = {};
VULKAN_HPP_NAMESPACE::DeviceSize alignment = {};
};
template <>
struct CppType<StructureType, StructureType::eTileMemoryRequirementsQCOM>
{
using Type = TileMemoryRequirementsQCOM;
};
// wrapper struct for struct VkTileMemorySizeInfoQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTileMemorySizeInfoQCOM.html
struct TileMemorySizeInfoQCOM
{
using NativeType = VkTileMemorySizeInfoQCOM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eTileMemorySizeInfoQCOM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR TileMemorySizeInfoQCOM(VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, size{ size_ }
{}
VULKAN_HPP_CONSTEXPR TileMemorySizeInfoQCOM( TileMemorySizeInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
TileMemorySizeInfoQCOM( VkTileMemorySizeInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
: TileMemorySizeInfoQCOM( *reinterpret_cast<TileMemorySizeInfoQCOM const *>( &rhs ) )
{}
TileMemorySizeInfoQCOM & operator=( TileMemorySizeInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
TileMemorySizeInfoQCOM & operator=( VkTileMemorySizeInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TileMemorySizeInfoQCOM const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 TileMemorySizeInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 TileMemorySizeInfoQCOM & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
{
size = size_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkTileMemorySizeInfoQCOM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkTileMemorySizeInfoQCOM*>( this );
}
operator VkTileMemorySizeInfoQCOM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkTileMemorySizeInfoQCOM*>( this );
}
operator VkTileMemorySizeInfoQCOM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkTileMemorySizeInfoQCOM*>( this );
}
operator VkTileMemorySizeInfoQCOM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkTileMemorySizeInfoQCOM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, size );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( TileMemorySizeInfoQCOM const & ) const = default;
#else
bool operator==( TileMemorySizeInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( size == rhs.size );
#endif
}
bool operator!=( TileMemorySizeInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTileMemorySizeInfoQCOM;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DeviceSize size = {};
};
template <>
struct CppType<StructureType, StructureType::eTileMemorySizeInfoQCOM>
{
using Type = TileMemorySizeInfoQCOM;
};
// wrapper struct for struct VkTilePropertiesQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTilePropertiesQCOM.html
struct TilePropertiesQCOM
{
using NativeType = VkTilePropertiesQCOM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eTilePropertiesQCOM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR TilePropertiesQCOM(VULKAN_HPP_NAMESPACE::Extent3D tileSize_ = {}, VULKAN_HPP_NAMESPACE::Extent2D apronSize_ = {}, VULKAN_HPP_NAMESPACE::Offset2D origin_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, tileSize{ tileSize_ }, apronSize{ apronSize_ }, origin{ origin_ }
{}
VULKAN_HPP_CONSTEXPR TilePropertiesQCOM( TilePropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
TilePropertiesQCOM( VkTilePropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
: TilePropertiesQCOM( *reinterpret_cast<TilePropertiesQCOM const *>( &rhs ) )
{}
TilePropertiesQCOM & operator=( TilePropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
TilePropertiesQCOM & operator=( VkTilePropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 TilePropertiesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 TilePropertiesQCOM & setTileSize( VULKAN_HPP_NAMESPACE::Extent3D const & tileSize_ ) VULKAN_HPP_NOEXCEPT
{
tileSize = tileSize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 TilePropertiesQCOM & setApronSize( VULKAN_HPP_NAMESPACE::Extent2D const & apronSize_ ) VULKAN_HPP_NOEXCEPT
{
apronSize = apronSize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 TilePropertiesQCOM & setOrigin( VULKAN_HPP_NAMESPACE::Offset2D const & origin_ ) VULKAN_HPP_NOEXCEPT
{
origin = origin_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkTilePropertiesQCOM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkTilePropertiesQCOM*>( this );
}
operator VkTilePropertiesQCOM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkTilePropertiesQCOM*>( this );
}
operator VkTilePropertiesQCOM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkTilePropertiesQCOM*>( this );
}
operator VkTilePropertiesQCOM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkTilePropertiesQCOM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Extent3D const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::Offset2D const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, tileSize, apronSize, origin );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( TilePropertiesQCOM const & ) const = default;
#else
bool operator==( TilePropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( tileSize == rhs.tileSize )
&& ( apronSize == rhs.apronSize )
&& ( origin == rhs.origin );
#endif
}
bool operator!=( TilePropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTilePropertiesQCOM;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Extent3D tileSize = {};
VULKAN_HPP_NAMESPACE::Extent2D apronSize = {};
VULKAN_HPP_NAMESPACE::Offset2D origin = {};
};
template <>
struct CppType<StructureType, StructureType::eTilePropertiesQCOM>
{
using Type = TilePropertiesQCOM;
};
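  // Query sketch (illustrative comment): TilePropertiesQCOM is an output
  // structure reported per tile by VK_QCOM_tile_properties; with the enhanced
  // API it is returned from, e.g., Device::getFramebufferTilePropertiesQCOM.
  // Assumes `device` and `framebuffer` are valid handles and the extension is
  // enabled.
  //
  //   std::vector<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM> tiles =
  //     device.getFramebufferTilePropertiesQCOM( framebuffer );
  //   for ( auto const & tile : tiles )
  //   {
  //     // tile.tileSize, tile.apronSize and tile.origin describe one tile.
  //   }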
// wrapper struct for struct VkTimelineSemaphoreSubmitInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTimelineSemaphoreSubmitInfo.html
struct TimelineSemaphoreSubmitInfo
{
using NativeType = VkTimelineSemaphoreSubmitInfo;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eTimelineSemaphoreSubmitInfo;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR TimelineSemaphoreSubmitInfo(uint32_t waitSemaphoreValueCount_ = {}, const uint64_t * pWaitSemaphoreValues_ = {}, uint32_t signalSemaphoreValueCount_ = {}, const uint64_t * pSignalSemaphoreValues_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, waitSemaphoreValueCount{ waitSemaphoreValueCount_ }, pWaitSemaphoreValues{ pWaitSemaphoreValues_ }, signalSemaphoreValueCount{ signalSemaphoreValueCount_ }, pSignalSemaphoreValues{ pSignalSemaphoreValues_ }
{}
VULKAN_HPP_CONSTEXPR TimelineSemaphoreSubmitInfo( TimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
TimelineSemaphoreSubmitInfo( VkTimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: TimelineSemaphoreSubmitInfo( *reinterpret_cast<TimelineSemaphoreSubmitInfo const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
TimelineSemaphoreSubmitInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & waitSemaphoreValues_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & signalSemaphoreValues_ = {}, const void * pNext_ = nullptr )
: pNext( pNext_ ), waitSemaphoreValueCount( static_cast<uint32_t>( waitSemaphoreValues_.size() ) ), pWaitSemaphoreValues( waitSemaphoreValues_.data() ), signalSemaphoreValueCount( static_cast<uint32_t>( signalSemaphoreValues_.size() ) ), pSignalSemaphoreValues( signalSemaphoreValues_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
TimelineSemaphoreSubmitInfo & operator=( TimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
TimelineSemaphoreSubmitInfo & operator=( VkTimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo & setWaitSemaphoreValueCount( uint32_t waitSemaphoreValueCount_ ) VULKAN_HPP_NOEXCEPT
{
waitSemaphoreValueCount = waitSemaphoreValueCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo & setPWaitSemaphoreValues( const uint64_t * pWaitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
{
pWaitSemaphoreValues = pWaitSemaphoreValues_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
TimelineSemaphoreSubmitInfo & setWaitSemaphoreValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & waitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
{
waitSemaphoreValueCount = static_cast<uint32_t>( waitSemaphoreValues_.size() );
pWaitSemaphoreValues = waitSemaphoreValues_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo & setSignalSemaphoreValueCount( uint32_t signalSemaphoreValueCount_ ) VULKAN_HPP_NOEXCEPT
{
signalSemaphoreValueCount = signalSemaphoreValueCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo & setPSignalSemaphoreValues( const uint64_t * pSignalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
{
pSignalSemaphoreValues = pSignalSemaphoreValues_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
TimelineSemaphoreSubmitInfo & setSignalSemaphoreValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & signalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
{
signalSemaphoreValueCount = static_cast<uint32_t>( signalSemaphoreValues_.size() );
pSignalSemaphoreValues = signalSemaphoreValues_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkTimelineSemaphoreSubmitInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkTimelineSemaphoreSubmitInfo*>( this );
}
operator VkTimelineSemaphoreSubmitInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkTimelineSemaphoreSubmitInfo*>( this );
}
operator VkTimelineSemaphoreSubmitInfo const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkTimelineSemaphoreSubmitInfo*>( this );
}
operator VkTimelineSemaphoreSubmitInfo *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkTimelineSemaphoreSubmitInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const uint64_t * const &, uint32_t const &, const uint64_t * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, waitSemaphoreValueCount, pWaitSemaphoreValues, signalSemaphoreValueCount, pSignalSemaphoreValues );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( TimelineSemaphoreSubmitInfo const & ) const = default;
#else
bool operator==( TimelineSemaphoreSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( waitSemaphoreValueCount == rhs.waitSemaphoreValueCount )
&& ( pWaitSemaphoreValues == rhs.pWaitSemaphoreValues )
&& ( signalSemaphoreValueCount == rhs.signalSemaphoreValueCount )
&& ( pSignalSemaphoreValues == rhs.pSignalSemaphoreValues );
#endif
}
bool operator!=( TimelineSemaphoreSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTimelineSemaphoreSubmitInfo;
const void * pNext = {};
uint32_t waitSemaphoreValueCount = {};
const uint64_t * pWaitSemaphoreValues = {};
uint32_t signalSemaphoreValueCount = {};
const uint64_t * pSignalSemaphoreValues = {};
};
template <>
struct CppType<StructureType, StructureType::eTimelineSemaphoreSubmitInfo>
{
using Type = TimelineSemaphoreSubmitInfo;
};
using TimelineSemaphoreSubmitInfoKHR = TimelineSemaphoreSubmitInfo;
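  // Usage sketch (illustrative comment): TimelineSemaphoreSubmitInfo extends
  // VkSubmitInfo through the pNext chain, pairing a value with each wait/signal
  // semaphore of the submission. Assumes `timeline` is a timeline
  // VULKAN_HPP_NAMESPACE::Semaphore and `queue` a valid Queue; in enhanced mode
  // the ArrayProxy setters keep the count and pointer members in sync.
  //
  //   uint64_t signalValue = 2;
  //   VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfo timelineInfo{};
  //   timelineInfo.setSignalSemaphoreValues( signalValue );
  //   VULKAN_HPP_NAMESPACE::SubmitInfo submitInfo{};
  //   submitInfo.setPNext( &timelineInfo ).setSignalSemaphores( timeline );
  //   queue.submit( submitInfo );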
// wrapper struct for struct VkTraceRaysIndirectCommand2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTraceRaysIndirectCommand2KHR.html
struct TraceRaysIndirectCommand2KHR
{
using NativeType = VkTraceRaysIndirectCommand2KHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR TraceRaysIndirectCommand2KHR(VULKAN_HPP_NAMESPACE::DeviceAddress raygenShaderRecordAddress_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderRecordSize_ = {}, VULKAN_HPP_NAMESPACE::DeviceAddress missShaderBindingTableAddress_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingTableSize_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingTableStride_ = {}, VULKAN_HPP_NAMESPACE::DeviceAddress hitShaderBindingTableAddress_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingTableSize_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingTableStride_ = {}, VULKAN_HPP_NAMESPACE::DeviceAddress callableShaderBindingTableAddress_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingTableSize_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingTableStride_ = {}, uint32_t width_ = {}, uint32_t height_ = {}, uint32_t depth_ = {}) VULKAN_HPP_NOEXCEPT
: raygenShaderRecordAddress{ raygenShaderRecordAddress_ }, raygenShaderRecordSize{ raygenShaderRecordSize_ }, missShaderBindingTableAddress{ missShaderBindingTableAddress_ }, missShaderBindingTableSize{ missShaderBindingTableSize_ }, missShaderBindingTableStride{ missShaderBindingTableStride_ }, hitShaderBindingTableAddress{ hitShaderBindingTableAddress_ }, hitShaderBindingTableSize{ hitShaderBindingTableSize_ }, hitShaderBindingTableStride{ hitShaderBindingTableStride_ }, callableShaderBindingTableAddress{ callableShaderBindingTableAddress_ }, callableShaderBindingTableSize{ callableShaderBindingTableSize_ }, callableShaderBindingTableStride{ callableShaderBindingTableStride_ }, width{ width_ }, height{ height_ }, depth{ depth_ }
{}
VULKAN_HPP_CONSTEXPR TraceRaysIndirectCommand2KHR( TraceRaysIndirectCommand2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
TraceRaysIndirectCommand2KHR( VkTraceRaysIndirectCommand2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
: TraceRaysIndirectCommand2KHR( *reinterpret_cast<TraceRaysIndirectCommand2KHR const *>( &rhs ) )
{}
TraceRaysIndirectCommand2KHR & operator=( TraceRaysIndirectCommand2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
TraceRaysIndirectCommand2KHR & operator=( VkTraceRaysIndirectCommand2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommand2KHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & setRaygenShaderRecordAddress( VULKAN_HPP_NAMESPACE::DeviceAddress raygenShaderRecordAddress_ ) VULKAN_HPP_NOEXCEPT
{
raygenShaderRecordAddress = raygenShaderRecordAddress_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & setRaygenShaderRecordSize( VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderRecordSize_ ) VULKAN_HPP_NOEXCEPT
{
raygenShaderRecordSize = raygenShaderRecordSize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & setMissShaderBindingTableAddress( VULKAN_HPP_NAMESPACE::DeviceAddress missShaderBindingTableAddress_ ) VULKAN_HPP_NOEXCEPT
{
missShaderBindingTableAddress = missShaderBindingTableAddress_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & setMissShaderBindingTableSize( VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingTableSize_ ) VULKAN_HPP_NOEXCEPT
{
missShaderBindingTableSize = missShaderBindingTableSize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & setMissShaderBindingTableStride( VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingTableStride_ ) VULKAN_HPP_NOEXCEPT
{
missShaderBindingTableStride = missShaderBindingTableStride_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & setHitShaderBindingTableAddress( VULKAN_HPP_NAMESPACE::DeviceAddress hitShaderBindingTableAddress_ ) VULKAN_HPP_NOEXCEPT
{
hitShaderBindingTableAddress = hitShaderBindingTableAddress_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & setHitShaderBindingTableSize( VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingTableSize_ ) VULKAN_HPP_NOEXCEPT
{
hitShaderBindingTableSize = hitShaderBindingTableSize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & setHitShaderBindingTableStride( VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingTableStride_ ) VULKAN_HPP_NOEXCEPT
{
hitShaderBindingTableStride = hitShaderBindingTableStride_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & setCallableShaderBindingTableAddress( VULKAN_HPP_NAMESPACE::DeviceAddress callableShaderBindingTableAddress_ ) VULKAN_HPP_NOEXCEPT
{
callableShaderBindingTableAddress = callableShaderBindingTableAddress_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & setCallableShaderBindingTableSize( VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingTableSize_ ) VULKAN_HPP_NOEXCEPT
{
callableShaderBindingTableSize = callableShaderBindingTableSize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & setCallableShaderBindingTableStride( VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingTableStride_ ) VULKAN_HPP_NOEXCEPT
{
callableShaderBindingTableStride = callableShaderBindingTableStride_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
{
width = width_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
{
height = height_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & setDepth( uint32_t depth_ ) VULKAN_HPP_NOEXCEPT
{
depth = depth_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkTraceRaysIndirectCommand2KHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkTraceRaysIndirectCommand2KHR*>( this );
}
operator VkTraceRaysIndirectCommand2KHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkTraceRaysIndirectCommand2KHR*>( this );
}
operator VkTraceRaysIndirectCommand2KHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkTraceRaysIndirectCommand2KHR*>( this );
}
operator VkTraceRaysIndirectCommand2KHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkTraceRaysIndirectCommand2KHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, uint32_t const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( raygenShaderRecordAddress, raygenShaderRecordSize, missShaderBindingTableAddress, missShaderBindingTableSize, missShaderBindingTableStride, hitShaderBindingTableAddress, hitShaderBindingTableSize, hitShaderBindingTableStride, callableShaderBindingTableAddress, callableShaderBindingTableSize, callableShaderBindingTableStride, width, height, depth );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( TraceRaysIndirectCommand2KHR const & ) const = default;
#else
bool operator==( TraceRaysIndirectCommand2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( raygenShaderRecordAddress == rhs.raygenShaderRecordAddress )
&& ( raygenShaderRecordSize == rhs.raygenShaderRecordSize )
&& ( missShaderBindingTableAddress == rhs.missShaderBindingTableAddress )
&& ( missShaderBindingTableSize == rhs.missShaderBindingTableSize )
&& ( missShaderBindingTableStride == rhs.missShaderBindingTableStride )
&& ( hitShaderBindingTableAddress == rhs.hitShaderBindingTableAddress )
&& ( hitShaderBindingTableSize == rhs.hitShaderBindingTableSize )
&& ( hitShaderBindingTableStride == rhs.hitShaderBindingTableStride )
&& ( callableShaderBindingTableAddress == rhs.callableShaderBindingTableAddress )
&& ( callableShaderBindingTableSize == rhs.callableShaderBindingTableSize )
&& ( callableShaderBindingTableStride == rhs.callableShaderBindingTableStride )
&& ( width == rhs.width )
&& ( height == rhs.height )
&& ( depth == rhs.depth );
#endif
}
bool operator!=( TraceRaysIndirectCommand2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::DeviceAddress raygenShaderRecordAddress = {};
VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderRecordSize = {};
VULKAN_HPP_NAMESPACE::DeviceAddress missShaderBindingTableAddress = {};
VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingTableSize = {};
VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingTableStride = {};
VULKAN_HPP_NAMESPACE::DeviceAddress hitShaderBindingTableAddress = {};
VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingTableSize = {};
VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingTableStride = {};
VULKAN_HPP_NAMESPACE::DeviceAddress callableShaderBindingTableAddress = {};
VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingTableSize = {};
VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingTableStride = {};
uint32_t width = {};
uint32_t height = {};
uint32_t depth = {};
};
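// A minimal usage sketch for TraceRaysIndirectCommand2KHR (illustrative only: the
// mapped buffer pointer, SBT device addresses, and aligned handle size below are
// assumptions, not part of this header). The wrapper is layout-compatible with
// VkTraceRaysIndirectCommand2KHR, so a populated instance can be copied straight
// into the indirect buffer consumed by vkCmdTraceRaysIndirect2KHR
// (VK_KHR_ray_tracing_maintenance1):
//
//   VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommand2KHR cmd;
//   cmd.setRaygenShaderRecordAddress( raygenSbtAddress )      // hypothetical SBT addresses
//      .setRaygenShaderRecordSize( sbtHandleSizeAligned )
//      .setMissShaderBindingTableAddress( missSbtAddress )
//      .setMissShaderBindingTableSize( sbtHandleSizeAligned )
//      .setMissShaderBindingTableStride( sbtHandleSizeAligned )
//      .setWidth( 1920 )
//      .setHeight( 1080 )
//      .setDepth( 1 );
//   std::memcpy( mappedIndirectBuffer, &cmd, sizeof( cmd ) );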
// wrapper struct for struct VkTraceRaysIndirectCommandKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTraceRaysIndirectCommandKHR.html
struct TraceRaysIndirectCommandKHR
{
using NativeType = VkTraceRaysIndirectCommandKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR TraceRaysIndirectCommandKHR(uint32_t width_ = {}, uint32_t height_ = {}, uint32_t depth_ = {}) VULKAN_HPP_NOEXCEPT
: width{ width_ }, height{ height_ }, depth{ depth_ }
{}
VULKAN_HPP_CONSTEXPR TraceRaysIndirectCommandKHR( TraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
TraceRaysIndirectCommandKHR( VkTraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: TraceRaysIndirectCommandKHR( *reinterpret_cast<TraceRaysIndirectCommandKHR const *>( &rhs ) )
{}
explicit TraceRaysIndirectCommandKHR( Extent2D const & extent2D, uint32_t depth_ = {} )
: width( extent2D.width )
, height( extent2D.height )
, depth( depth_ )
{}
TraceRaysIndirectCommandKHR & operator=( TraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
TraceRaysIndirectCommandKHR & operator=( VkTraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommandKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommandKHR & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
{
width = width_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommandKHR & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
{
height = height_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommandKHR & setDepth( uint32_t depth_ ) VULKAN_HPP_NOEXCEPT
{
depth = depth_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkTraceRaysIndirectCommandKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkTraceRaysIndirectCommandKHR*>( this );
}
operator VkTraceRaysIndirectCommandKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkTraceRaysIndirectCommandKHR*>( this );
}
operator VkTraceRaysIndirectCommandKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkTraceRaysIndirectCommandKHR*>( this );
}
operator VkTraceRaysIndirectCommandKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkTraceRaysIndirectCommandKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<uint32_t const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( width, height, depth );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( TraceRaysIndirectCommandKHR const & ) const = default;
#else
bool operator==( TraceRaysIndirectCommandKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( width == rhs.width )
&& ( height == rhs.height )
&& ( depth == rhs.depth );
#endif
}
bool operator!=( TraceRaysIndirectCommandKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
uint32_t width = {};
uint32_t height = {};
uint32_t depth = {};
};
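// A minimal usage sketch for TraceRaysIndirectCommandKHR (illustrative only:
// `swapchainExtent` and `mappedIndirectBuffer` are assumptions). The Extent2D
// convenience constructor above fills width/height from an extent; the result is
// read on the device by vkCmdTraceRaysIndirectKHR:
//
//   VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommandKHR cmd( swapchainExtent, 1 );
//   std::memcpy( mappedIndirectBuffer, &cmd, sizeof( cmd ) );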
// wrapper struct for struct VkValidationCacheCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkValidationCacheCreateInfoEXT.html
struct ValidationCacheCreateInfoEXT
{
using NativeType = VkValidationCacheCreateInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eValidationCacheCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ValidationCacheCreateInfoEXT(VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_ = {}, size_t initialDataSize_ = {}, const void * pInitialData_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, initialDataSize{ initialDataSize_ }, pInitialData{ pInitialData_ }
{}
VULKAN_HPP_CONSTEXPR ValidationCacheCreateInfoEXT( ValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ValidationCacheCreateInfoEXT( VkValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: ValidationCacheCreateInfoEXT( *reinterpret_cast<ValidationCacheCreateInfoEXT const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
template <typename T>
ValidationCacheCreateInfoEXT( VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & initialData_, const void * pNext_ = nullptr )
: pNext( pNext_ ), flags( flags_ ), initialDataSize( initialData_.size() * sizeof(T) ), pInitialData( initialData_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
ValidationCacheCreateInfoEXT & operator=( ValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ValidationCacheCreateInfoEXT & operator=( VkValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ValidationCacheCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ValidationCacheCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ValidationCacheCreateInfoEXT & setInitialDataSize( size_t initialDataSize_ ) VULKAN_HPP_NOEXCEPT
{
initialDataSize = initialDataSize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ValidationCacheCreateInfoEXT & setPInitialData( const void * pInitialData_ ) VULKAN_HPP_NOEXCEPT
{
pInitialData = pInitialData_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
template <typename T>
ValidationCacheCreateInfoEXT & setInitialData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & initialData_ ) VULKAN_HPP_NOEXCEPT
{
initialDataSize = initialData_.size() * sizeof(T);
pInitialData = initialData_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkValidationCacheCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkValidationCacheCreateInfoEXT*>( this );
}
operator VkValidationCacheCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkValidationCacheCreateInfoEXT*>( this );
}
operator VkValidationCacheCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkValidationCacheCreateInfoEXT*>( this );
}
operator VkValidationCacheCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkValidationCacheCreateInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT const &, size_t const &, const void * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, initialDataSize, pInitialData );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ValidationCacheCreateInfoEXT const & ) const = default;
#else
bool operator==( ValidationCacheCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( initialDataSize == rhs.initialDataSize )
&& ( pInitialData == rhs.pInitialData );
#endif
}
bool operator!=( ValidationCacheCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationCacheCreateInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags = {};
size_t initialDataSize = {};
const void * pInitialData = {};
};
template <>
struct CppType<StructureType, StructureType::eValidationCacheCreateInfoEXT>
{
using Type = ValidationCacheCreateInfoEXT;
};
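// A minimal usage sketch for ValidationCacheCreateInfoEXT (illustrative only:
// `device` and the `loadCacheBlob` helper are assumptions). In enhanced mode the
// ArrayProxy constructor derives initialDataSize and pInitialData from a
// previously saved cache blob:
//
//   std::vector<uint8_t> cacheBlob = loadCacheBlob();  // hypothetical helper
//   VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT createInfo( {}, cacheBlob );
//   VULKAN_HPP_NAMESPACE::ValidationCacheEXT cache =
//     device.createValidationCacheEXT( createInfo );   // requires VK_EXT_validation_cache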
// wrapper struct for struct VkValidationFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkValidationFeaturesEXT.html
struct ValidationFeaturesEXT
{
using NativeType = VkValidationFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eValidationFeaturesEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ValidationFeaturesEXT(uint32_t enabledValidationFeatureCount_ = {}, const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT * pEnabledValidationFeatures_ = {}, uint32_t disabledValidationFeatureCount_ = {}, const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT * pDisabledValidationFeatures_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, enabledValidationFeatureCount{ enabledValidationFeatureCount_ }, pEnabledValidationFeatures{ pEnabledValidationFeatures_ }, disabledValidationFeatureCount{ disabledValidationFeatureCount_ }, pDisabledValidationFeatures{ pDisabledValidationFeatures_ }
{}
VULKAN_HPP_CONSTEXPR ValidationFeaturesEXT( ValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ValidationFeaturesEXT( VkValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: ValidationFeaturesEXT( *reinterpret_cast<ValidationFeaturesEXT const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
ValidationFeaturesEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT> const & enabledValidationFeatures_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT> const & disabledValidationFeatures_ = {}, const void * pNext_ = nullptr )
: pNext( pNext_ ), enabledValidationFeatureCount( static_cast<uint32_t>( enabledValidationFeatures_.size() ) ), pEnabledValidationFeatures( enabledValidationFeatures_.data() ), disabledValidationFeatureCount( static_cast<uint32_t>( disabledValidationFeatures_.size() ) ), pDisabledValidationFeatures( disabledValidationFeatures_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
ValidationFeaturesEXT & operator=( ValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ValidationFeaturesEXT & operator=( VkValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT & setEnabledValidationFeatureCount( uint32_t enabledValidationFeatureCount_ ) VULKAN_HPP_NOEXCEPT
{
enabledValidationFeatureCount = enabledValidationFeatureCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT & setPEnabledValidationFeatures( const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT * pEnabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT
{
pEnabledValidationFeatures = pEnabledValidationFeatures_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
ValidationFeaturesEXT & setEnabledValidationFeatures( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT> const & enabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT
{
enabledValidationFeatureCount = static_cast<uint32_t>( enabledValidationFeatures_.size() );
pEnabledValidationFeatures = enabledValidationFeatures_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT & setDisabledValidationFeatureCount( uint32_t disabledValidationFeatureCount_ ) VULKAN_HPP_NOEXCEPT
{
disabledValidationFeatureCount = disabledValidationFeatureCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT & setPDisabledValidationFeatures( const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT * pDisabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT
{
pDisabledValidationFeatures = pDisabledValidationFeatures_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
ValidationFeaturesEXT & setDisabledValidationFeatures( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT> const & disabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT
{
disabledValidationFeatureCount = static_cast<uint32_t>( disabledValidationFeatures_.size() );
pDisabledValidationFeatures = disabledValidationFeatures_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkValidationFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkValidationFeaturesEXT*>( this );
}
operator VkValidationFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkValidationFeaturesEXT*>( this );
}
operator VkValidationFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkValidationFeaturesEXT*>( this );
}
operator VkValidationFeaturesEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkValidationFeaturesEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, enabledValidationFeatureCount, pEnabledValidationFeatures, disabledValidationFeatureCount, pDisabledValidationFeatures );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ValidationFeaturesEXT const & ) const = default;
#else
bool operator==( ValidationFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( enabledValidationFeatureCount == rhs.enabledValidationFeatureCount )
&& ( pEnabledValidationFeatures == rhs.pEnabledValidationFeatures )
&& ( disabledValidationFeatureCount == rhs.disabledValidationFeatureCount )
&& ( pDisabledValidationFeatures == rhs.pDisabledValidationFeatures );
#endif
}
bool operator!=( ValidationFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationFeaturesEXT;
const void * pNext = {};
uint32_t enabledValidationFeatureCount = {};
const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT * pEnabledValidationFeatures = {};
uint32_t disabledValidationFeatureCount = {};
const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT * pDisabledValidationFeatures = {};
};
template <>
struct CppType<StructureType, StructureType::eValidationFeaturesEXT>
{
using Type = ValidationFeaturesEXT;
};
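// A minimal usage sketch for ValidationFeaturesEXT (illustrative only: the
// surrounding instance-creation code is assumed). The struct is chained into
// InstanceCreateInfo::pNext to enable or disable validation-layer features:
//
//   std::array<VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT, 2> enables = {
//     VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT::eBestPractices,
//     VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT::eSynchronizationValidation };
//   VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT validationFeatures( enables );
//   VULKAN_HPP_NAMESPACE::InstanceCreateInfo instanceCreateInfo;
//   instanceCreateInfo.setPNext( &validationFeatures );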
// wrapper struct for struct VkValidationFlagsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkValidationFlagsEXT.html
struct ValidationFlagsEXT
{
using NativeType = VkValidationFlagsEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eValidationFlagsEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ValidationFlagsEXT(uint32_t disabledValidationCheckCount_ = {}, const VULKAN_HPP_NAMESPACE::ValidationCheckEXT * pDisabledValidationChecks_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, disabledValidationCheckCount{ disabledValidationCheckCount_ }, pDisabledValidationChecks{ pDisabledValidationChecks_ }
{}
VULKAN_HPP_CONSTEXPR ValidationFlagsEXT( ValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ValidationFlagsEXT( VkValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: ValidationFlagsEXT( *reinterpret_cast<ValidationFlagsEXT const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
ValidationFlagsEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationCheckEXT> const & disabledValidationChecks_, const void * pNext_ = nullptr )
: pNext( pNext_ ), disabledValidationCheckCount( static_cast<uint32_t>( disabledValidationChecks_.size() ) ), pDisabledValidationChecks( disabledValidationChecks_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
ValidationFlagsEXT & operator=( ValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ValidationFlagsEXT & operator=( VkValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ValidationFlagsEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ValidationFlagsEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ValidationFlagsEXT & setDisabledValidationCheckCount( uint32_t disabledValidationCheckCount_ ) VULKAN_HPP_NOEXCEPT
{
disabledValidationCheckCount = disabledValidationCheckCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ValidationFlagsEXT & setPDisabledValidationChecks( const VULKAN_HPP_NAMESPACE::ValidationCheckEXT * pDisabledValidationChecks_ ) VULKAN_HPP_NOEXCEPT
{
pDisabledValidationChecks = pDisabledValidationChecks_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
ValidationFlagsEXT & setDisabledValidationChecks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationCheckEXT> const & disabledValidationChecks_ ) VULKAN_HPP_NOEXCEPT
{
disabledValidationCheckCount = static_cast<uint32_t>( disabledValidationChecks_.size() );
pDisabledValidationChecks = disabledValidationChecks_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkValidationFlagsEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkValidationFlagsEXT*>( this );
}
operator VkValidationFlagsEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkValidationFlagsEXT*>( this );
}
operator VkValidationFlagsEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkValidationFlagsEXT*>( this );
}
operator VkValidationFlagsEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkValidationFlagsEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::ValidationCheckEXT * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, disabledValidationCheckCount, pDisabledValidationChecks );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ValidationFlagsEXT const & ) const = default;
#else
bool operator==( ValidationFlagsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( disabledValidationCheckCount == rhs.disabledValidationCheckCount )
&& ( pDisabledValidationChecks == rhs.pDisabledValidationChecks );
#endif
}
bool operator!=( ValidationFlagsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationFlagsEXT;
const void * pNext = {};
uint32_t disabledValidationCheckCount = {};
const VULKAN_HPP_NAMESPACE::ValidationCheckEXT * pDisabledValidationChecks = {};
};
template <>
struct CppType<StructureType, StructureType::eValidationFlagsEXT>
{
using Type = ValidationFlagsEXT;
};
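// A minimal usage sketch for ValidationFlagsEXT (illustrative only: the
// `instanceCreateInfo` variable is assumed, as in the ValidationFeaturesEXT
// example above). It is likewise chained into InstanceCreateInfo::pNext, here to
// disable whole validation checks via VK_EXT_validation_flags:
//
//   std::array<VULKAN_HPP_NAMESPACE::ValidationCheckEXT, 1> disabledChecks = {
//     VULKAN_HPP_NAMESPACE::ValidationCheckEXT::eShaders };
//   VULKAN_HPP_NAMESPACE::ValidationFlagsEXT validationFlags( disabledChecks );
//   instanceCreateInfo.setPNext( &validationFlags );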
// wrapper struct for struct VkVertexInputAttributeDescription2EXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVertexInputAttributeDescription2EXT.html
struct VertexInputAttributeDescription2EXT
{
using NativeType = VkVertexInputAttributeDescription2EXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVertexInputAttributeDescription2EXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription2EXT(uint32_t location_ = {}, uint32_t binding_ = {}, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, uint32_t offset_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, location{ location_ }, binding{ binding_ }, format{ format_ }, offset{ offset_ }
{}
VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription2EXT( VertexInputAttributeDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VertexInputAttributeDescription2EXT( VkVertexInputAttributeDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
: VertexInputAttributeDescription2EXT( *reinterpret_cast<VertexInputAttributeDescription2EXT const *>( &rhs ) )
{}
VertexInputAttributeDescription2EXT & operator=( VertexInputAttributeDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VertexInputAttributeDescription2EXT & operator=( VkVertexInputAttributeDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription2EXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription2EXT & setLocation( uint32_t location_ ) VULKAN_HPP_NOEXCEPT
{
location = location_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription2EXT & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
{
binding = binding_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription2EXT & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
{
format = format_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription2EXT & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT
{
offset = offset_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVertexInputAttributeDescription2EXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVertexInputAttributeDescription2EXT*>( this );
}
operator VkVertexInputAttributeDescription2EXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVertexInputAttributeDescription2EXT*>( this );
}
operator VkVertexInputAttributeDescription2EXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVertexInputAttributeDescription2EXT*>( this );
}
operator VkVertexInputAttributeDescription2EXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVertexInputAttributeDescription2EXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Format const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, location, binding, format, offset );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VertexInputAttributeDescription2EXT const & ) const = default;
#else
bool operator==( VertexInputAttributeDescription2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( location == rhs.location )
&& ( binding == rhs.binding )
&& ( format == rhs.format )
&& ( offset == rhs.offset );
#endif
}
bool operator!=( VertexInputAttributeDescription2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVertexInputAttributeDescription2EXT;
void * pNext = {};
uint32_t location = {};
uint32_t binding = {};
VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
uint32_t offset = {};
};
template <>
struct CppType<StructureType, StructureType::eVertexInputAttributeDescription2EXT>
{
using Type = VertexInputAttributeDescription2EXT;
};
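// A minimal usage sketch for VertexInputAttributeDescription2EXT (illustrative
// only: `cmdBuffer` and `bindingDescription` are assumed). Together with
// VertexInputBindingDescription2EXT below, it feeds the dynamic vertex-input
// state command from VK_EXT_vertex_input_dynamic_state:
//
//   VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT attributeDescription(
//     /*location*/ 0, /*binding*/ 0, VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sfloat, /*offset*/ 0 );
//   cmdBuffer.setVertexInputEXT( bindingDescription, attributeDescription );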
// wrapper struct for struct VkVertexInputBindingDescription2EXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVertexInputBindingDescription2EXT.html
struct VertexInputBindingDescription2EXT
{
using NativeType = VkVertexInputBindingDescription2EXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVertexInputBindingDescription2EXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VertexInputBindingDescription2EXT(uint32_t binding_ = {}, uint32_t stride_ = {}, VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex, uint32_t divisor_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, binding{ binding_ }, stride{ stride_ }, inputRate{ inputRate_ }, divisor{ divisor_ }
{}
VULKAN_HPP_CONSTEXPR VertexInputBindingDescription2EXT( VertexInputBindingDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VertexInputBindingDescription2EXT( VkVertexInputBindingDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
: VertexInputBindingDescription2EXT( *reinterpret_cast<VertexInputBindingDescription2EXT const *>( &rhs ) )
{}
VertexInputBindingDescription2EXT & operator=( VertexInputBindingDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VertexInputBindingDescription2EXT & operator=( VkVertexInputBindingDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription2EXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription2EXT & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
{
binding = binding_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription2EXT & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT
{
stride = stride_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription2EXT & setInputRate( VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ ) VULKAN_HPP_NOEXCEPT
{
inputRate = inputRate_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription2EXT & setDivisor( uint32_t divisor_ ) VULKAN_HPP_NOEXCEPT
{
divisor = divisor_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVertexInputBindingDescription2EXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVertexInputBindingDescription2EXT*>( this );
}
operator VkVertexInputBindingDescription2EXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVertexInputBindingDescription2EXT*>( this );
}
operator VkVertexInputBindingDescription2EXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVertexInputBindingDescription2EXT*>( this );
}
operator VkVertexInputBindingDescription2EXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVertexInputBindingDescription2EXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::VertexInputRate const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, binding, stride, inputRate, divisor );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VertexInputBindingDescription2EXT const & ) const = default;
#else
bool operator==( VertexInputBindingDescription2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( binding == rhs.binding )
&& ( stride == rhs.stride )
&& ( inputRate == rhs.inputRate )
&& ( divisor == rhs.divisor );
#endif
}
bool operator!=( VertexInputBindingDescription2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVertexInputBindingDescription2EXT;
void * pNext = {};
uint32_t binding = {};
uint32_t stride = {};
VULKAN_HPP_NAMESPACE::VertexInputRate inputRate = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex;
uint32_t divisor = {};
};
template <>
struct CppType<StructureType, StructureType::eVertexInputBindingDescription2EXT>
{
using Type = VertexInputBindingDescription2EXT;
};
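// A minimal usage sketch for VertexInputBindingDescription2EXT (illustrative
// only: the `Vertex` type is an assumption). This would serve as the
// `bindingDescription` in the setVertexInputEXT example above; note that the
// registry requires divisor == 1 unless the vertexAttributeInstanceRateDivisor
// feature is enabled:
//
//   VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT bindingDescription(
//     /*binding*/ 0, /*stride*/ sizeof( Vertex ),
//     VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex, /*divisor*/ 1 );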
#if defined( VK_USE_PLATFORM_VI_NN )
// wrapper struct for struct VkViSurfaceCreateInfoNN, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkViSurfaceCreateInfoNN.html
struct ViSurfaceCreateInfoNN
{
using NativeType = VkViSurfaceCreateInfoNN;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eViSurfaceCreateInfoNN;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ViSurfaceCreateInfoNN(VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags_ = {}, void * window_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, window{ window_ }
{}
VULKAN_HPP_CONSTEXPR ViSurfaceCreateInfoNN( ViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ViSurfaceCreateInfoNN( VkViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT
: ViSurfaceCreateInfoNN( *reinterpret_cast<ViSurfaceCreateInfoNN const *>( &rhs ) )
{}
ViSurfaceCreateInfoNN & operator=( ViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
ViSurfaceCreateInfoNN & operator=( VkViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ViSurfaceCreateInfoNN & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ViSurfaceCreateInfoNN & setFlags( VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ViSurfaceCreateInfoNN & setWindow( void * window_ ) VULKAN_HPP_NOEXCEPT
{
window = window_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkViSurfaceCreateInfoNN const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkViSurfaceCreateInfoNN*>( this );
}
operator VkViSurfaceCreateInfoNN &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkViSurfaceCreateInfoNN*>( this );
}
operator VkViSurfaceCreateInfoNN const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkViSurfaceCreateInfoNN*>( this );
}
operator VkViSurfaceCreateInfoNN *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkViSurfaceCreateInfoNN*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN const &, void * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, window );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ViSurfaceCreateInfoNN const & ) const = default;
#else
bool operator==( ViSurfaceCreateInfoNN const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( window == rhs.window );
#endif
}
bool operator!=( ViSurfaceCreateInfoNN const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eViSurfaceCreateInfoNN;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags = {};
void * window = {};
};
template <>
struct CppType<StructureType, StructureType::eViSurfaceCreateInfoNN>
{
using Type = ViSurfaceCreateInfoNN;
};
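// A minimal usage sketch for ViSurfaceCreateInfoNN (illustrative only: `instance`
// is assumed, and `nnWindow` stands in for a window handle obtained from the NN
// platform SDK):
//
//   VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN surfaceCreateInfo( {}, nnWindow );
//   VULKAN_HPP_NAMESPACE::SurfaceKHR surface =
//     instance.createViSurfaceNN( surfaceCreateInfo );  // requires VK_NN_vi_surface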
#endif /*VK_USE_PLATFORM_VI_NN*/
// wrapper struct for struct VkVideoPictureResourceInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoPictureResourceInfoKHR.html
struct VideoPictureResourceInfoKHR
{
using NativeType = VkVideoPictureResourceInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoPictureResourceInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoPictureResourceInfoKHR(VULKAN_HPP_NAMESPACE::Offset2D codedOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent2D codedExtent_ = {}, uint32_t baseArrayLayer_ = {}, VULKAN_HPP_NAMESPACE::ImageView imageViewBinding_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, codedOffset{ codedOffset_ }, codedExtent{ codedExtent_ }, baseArrayLayer{ baseArrayLayer_ }, imageViewBinding{ imageViewBinding_ }
{}
VULKAN_HPP_CONSTEXPR VideoPictureResourceInfoKHR( VideoPictureResourceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoPictureResourceInfoKHR( VkVideoPictureResourceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoPictureResourceInfoKHR( *reinterpret_cast<VideoPictureResourceInfoKHR const *>( &rhs ) )
{}
VideoPictureResourceInfoKHR & operator=( VideoPictureResourceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoPictureResourceInfoKHR & operator=( VkVideoPictureResourceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceInfoKHR & setCodedOffset( VULKAN_HPP_NAMESPACE::Offset2D const & codedOffset_ ) VULKAN_HPP_NOEXCEPT
{
codedOffset = codedOffset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceInfoKHR & setCodedExtent( VULKAN_HPP_NAMESPACE::Extent2D const & codedExtent_ ) VULKAN_HPP_NOEXCEPT
{
codedExtent = codedExtent_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceInfoKHR & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT
{
baseArrayLayer = baseArrayLayer_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceInfoKHR & setImageViewBinding( VULKAN_HPP_NAMESPACE::ImageView imageViewBinding_ ) VULKAN_HPP_NOEXCEPT
{
imageViewBinding = imageViewBinding_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoPictureResourceInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoPictureResourceInfoKHR*>( this );
}
operator VkVideoPictureResourceInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoPictureResourceInfoKHR*>( this );
}
operator VkVideoPictureResourceInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoPictureResourceInfoKHR*>( this );
}
operator VkVideoPictureResourceInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoPictureResourceInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Offset2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ImageView const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, codedOffset, codedExtent, baseArrayLayer, imageViewBinding );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoPictureResourceInfoKHR const & ) const = default;
#else
bool operator==( VideoPictureResourceInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( codedOffset == rhs.codedOffset )
&& ( codedExtent == rhs.codedExtent )
&& ( baseArrayLayer == rhs.baseArrayLayer )
&& ( imageViewBinding == rhs.imageViewBinding );
#endif
}
bool operator!=( VideoPictureResourceInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoPictureResourceInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Offset2D codedOffset = {};
VULKAN_HPP_NAMESPACE::Extent2D codedExtent = {};
uint32_t baseArrayLayer = {};
VULKAN_HPP_NAMESPACE::ImageView imageViewBinding = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoPictureResourceInfoKHR>
{
using Type = VideoPictureResourceInfoKHR;
};
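// A minimal usage sketch for VideoPictureResourceInfoKHR (illustrative only:
// `dpbImageView` and the coded extent are assumptions). It identifies a picture
// region within a video-compatible image, e.g. a decode output or DPB picture:
//
//   VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR pictureResource(
//     /*codedOffset*/ {}, /*codedExtent*/ { 1920, 1080 },
//     /*baseArrayLayer*/ 0, dpbImageView );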
// wrapper struct for struct VkVideoReferenceSlotInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoReferenceSlotInfoKHR.html
struct VideoReferenceSlotInfoKHR
{
using NativeType = VkVideoReferenceSlotInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoReferenceSlotInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoReferenceSlotInfoKHR(int32_t slotIndex_ = {}, const VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR * pPictureResource_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, slotIndex{ slotIndex_ }, pPictureResource{ pPictureResource_ }
{}
VULKAN_HPP_CONSTEXPR VideoReferenceSlotInfoKHR( VideoReferenceSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoReferenceSlotInfoKHR( VkVideoReferenceSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoReferenceSlotInfoKHR( *reinterpret_cast<VideoReferenceSlotInfoKHR const *>( &rhs ) )
{}
VideoReferenceSlotInfoKHR & operator=( VideoReferenceSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoReferenceSlotInfoKHR & operator=( VkVideoReferenceSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoReferenceSlotInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoReferenceSlotInfoKHR & setSlotIndex( int32_t slotIndex_ ) VULKAN_HPP_NOEXCEPT
{
slotIndex = slotIndex_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoReferenceSlotInfoKHR & setPPictureResource( const VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR * pPictureResource_ ) VULKAN_HPP_NOEXCEPT
{
pPictureResource = pPictureResource_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoReferenceSlotInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoReferenceSlotInfoKHR*>( this );
}
operator VkVideoReferenceSlotInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoReferenceSlotInfoKHR*>( this );
}
operator VkVideoReferenceSlotInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoReferenceSlotInfoKHR*>( this );
}
operator VkVideoReferenceSlotInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoReferenceSlotInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, int32_t const &, const VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, slotIndex, pPictureResource );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoReferenceSlotInfoKHR const & ) const = default;
#else
bool operator==( VideoReferenceSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( slotIndex == rhs.slotIndex )
&& ( pPictureResource == rhs.pPictureResource );
#endif
}
bool operator!=( VideoReferenceSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoReferenceSlotInfoKHR;
const void * pNext = {};
int32_t slotIndex = {};
const VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR * pPictureResource = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoReferenceSlotInfoKHR>
{
using Type = VideoReferenceSlotInfoKHR;
};
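// A minimal usage sketch for VideoReferenceSlotInfoKHR (illustrative only:
// `pictureResource` is assumed to be set up as in the example above). Per the
// registry, a negative slotIndex denotes a picture that is not associated with a
// DPB slot:
//
//   VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR referenceSlot(
//     /*slotIndex*/ 0, &pictureResource );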
// wrapper struct for struct VkVideoBeginCodingInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoBeginCodingInfoKHR.html
struct VideoBeginCodingInfoKHR
{
using NativeType = VkVideoBeginCodingInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoBeginCodingInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoBeginCodingInfoKHR(VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR flags_ = {}, VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_ = {}, VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters_ = {}, uint32_t referenceSlotCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, videoSession{ videoSession_ }, videoSessionParameters{ videoSessionParameters_ }, referenceSlotCount{ referenceSlotCount_ }, pReferenceSlots{ pReferenceSlots_ }
{}
VULKAN_HPP_CONSTEXPR VideoBeginCodingInfoKHR( VideoBeginCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoBeginCodingInfoKHR( VkVideoBeginCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoBeginCodingInfoKHR( *reinterpret_cast<VideoBeginCodingInfoKHR const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
VideoBeginCodingInfoKHR( VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR flags_, VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_, VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR> const & referenceSlots_, const void * pNext_ = nullptr )
: pNext( pNext_ ), flags( flags_ ), videoSession( videoSession_ ), videoSessionParameters( videoSessionParameters_ ), referenceSlotCount( static_cast<uint32_t>( referenceSlots_.size() ) ), pReferenceSlots( referenceSlots_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VideoBeginCodingInfoKHR & operator=( VideoBeginCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoBeginCodingInfoKHR & operator=( VkVideoBeginCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & setVideoSession( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_ ) VULKAN_HPP_NOEXCEPT
{
videoSession = videoSession_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & setVideoSessionParameters( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters_ ) VULKAN_HPP_NOEXCEPT
{
videoSessionParameters = videoSessionParameters_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & setReferenceSlotCount( uint32_t referenceSlotCount_ ) VULKAN_HPP_NOEXCEPT
{
referenceSlotCount = referenceSlotCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & setPReferenceSlots( const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots_ ) VULKAN_HPP_NOEXCEPT
{
pReferenceSlots = pReferenceSlots_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
VideoBeginCodingInfoKHR & setReferenceSlots( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR> const & referenceSlots_ ) VULKAN_HPP_NOEXCEPT
{
referenceSlotCount = static_cast<uint32_t>( referenceSlots_.size() );
pReferenceSlots = referenceSlots_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoBeginCodingInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoBeginCodingInfoKHR*>( this );
}
operator VkVideoBeginCodingInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoBeginCodingInfoKHR*>( this );
}
operator VkVideoBeginCodingInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoBeginCodingInfoKHR*>( this );
}
operator VkVideoBeginCodingInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoBeginCodingInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR const &, VULKAN_HPP_NAMESPACE::VideoSessionKHR const &, VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, videoSession, videoSessionParameters, referenceSlotCount, pReferenceSlots );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoBeginCodingInfoKHR const & ) const = default;
#else
bool operator==( VideoBeginCodingInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( videoSession == rhs.videoSession )
&& ( videoSessionParameters == rhs.videoSessionParameters )
&& ( referenceSlotCount == rhs.referenceSlotCount )
&& ( pReferenceSlots == rhs.pReferenceSlots );
#endif
}
bool operator!=( VideoBeginCodingInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoBeginCodingInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR flags = {};
VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession = {};
VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters = {};
uint32_t referenceSlotCount = {};
const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoBeginCodingInfoKHR>
{
using Type = VideoBeginCodingInfoKHR;
};
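// A minimal usage sketch for VideoBeginCodingInfoKHR (illustrative only:
// `cmdBuffer`, `videoSession`, `sessionParameters`, and `referenceSlot` are
// assumptions). The enhanced-mode constructor derives referenceSlotCount and
// pReferenceSlots from an ArrayProxy; the struct opens a video coding scope:
//
//   VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR beginInfo(
//     {}, videoSession, sessionParameters, referenceSlot );
//   cmdBuffer.beginVideoCodingKHR( beginInfo );
//   // ... record video decode/encode commands ...
//   cmdBuffer.endVideoCodingKHR( VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR{} );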
// wrapper struct for struct VkVideoCapabilitiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoCapabilitiesKHR.html
struct VideoCapabilitiesKHR
{
using NativeType = VkVideoCapabilitiesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoCapabilitiesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 VideoCapabilitiesKHR(VULKAN_HPP_NAMESPACE::VideoCapabilityFlagsKHR flags_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize minBitstreamBufferOffsetAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize minBitstreamBufferSizeAlignment_ = {}, VULKAN_HPP_NAMESPACE::Extent2D pictureAccessGranularity_ = {}, VULKAN_HPP_NAMESPACE::Extent2D minCodedExtent_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxCodedExtent_ = {}, uint32_t maxDpbSlots_ = {}, uint32_t maxActiveReferencePictures_ = {}, VULKAN_HPP_NAMESPACE::ExtensionProperties stdHeaderVersion_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, minBitstreamBufferOffsetAlignment{ minBitstreamBufferOffsetAlignment_ }, minBitstreamBufferSizeAlignment{ minBitstreamBufferSizeAlignment_ }, pictureAccessGranularity{ pictureAccessGranularity_ }, minCodedExtent{ minCodedExtent_ }, maxCodedExtent{ maxCodedExtent_ }, maxDpbSlots{ maxDpbSlots_ }, maxActiveReferencePictures{ maxActiveReferencePictures_ }, stdHeaderVersion{ stdHeaderVersion_ }
{}
VULKAN_HPP_CONSTEXPR_14 VideoCapabilitiesKHR( VideoCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoCapabilitiesKHR( VkVideoCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoCapabilitiesKHR( *reinterpret_cast<VideoCapabilitiesKHR const *>( &rhs ) )
{}
VideoCapabilitiesKHR & operator=( VideoCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoCapabilitiesKHR & operator=( VkVideoCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR const *>( &rhs );
return *this;
}
operator VkVideoCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoCapabilitiesKHR*>( this );
}
operator VkVideoCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoCapabilitiesKHR*>( this );
}
operator VkVideoCapabilitiesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoCapabilitiesKHR*>( this );
}
operator VkVideoCapabilitiesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoCapabilitiesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::VideoCapabilityFlagsKHR const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ExtensionProperties const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, minBitstreamBufferOffsetAlignment, minBitstreamBufferSizeAlignment, pictureAccessGranularity, minCodedExtent, maxCodedExtent, maxDpbSlots, maxActiveReferencePictures, stdHeaderVersion );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoCapabilitiesKHR const & ) const = default;
#else
bool operator==( VideoCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( minBitstreamBufferOffsetAlignment == rhs.minBitstreamBufferOffsetAlignment )
&& ( minBitstreamBufferSizeAlignment == rhs.minBitstreamBufferSizeAlignment )
&& ( pictureAccessGranularity == rhs.pictureAccessGranularity )
&& ( minCodedExtent == rhs.minCodedExtent )
&& ( maxCodedExtent == rhs.maxCodedExtent )
&& ( maxDpbSlots == rhs.maxDpbSlots )
&& ( maxActiveReferencePictures == rhs.maxActiveReferencePictures )
&& ( stdHeaderVersion == rhs.stdHeaderVersion );
#endif
}
bool operator!=( VideoCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoCapabilitiesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::VideoCapabilityFlagsKHR flags = {};
VULKAN_HPP_NAMESPACE::DeviceSize minBitstreamBufferOffsetAlignment = {};
VULKAN_HPP_NAMESPACE::DeviceSize minBitstreamBufferSizeAlignment = {};
VULKAN_HPP_NAMESPACE::Extent2D pictureAccessGranularity = {};
VULKAN_HPP_NAMESPACE::Extent2D minCodedExtent = {};
VULKAN_HPP_NAMESPACE::Extent2D maxCodedExtent = {};
uint32_t maxDpbSlots = {};
uint32_t maxActiveReferencePictures = {};
VULKAN_HPP_NAMESPACE::ExtensionProperties stdHeaderVersion = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoCapabilitiesKHR>
{
using Type = VideoCapabilitiesKHR;
};
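// Usage sketch (editorial comment, not emitted by the generator): VideoCapabilitiesKHR is the base
// output structure of vkGetPhysicalDeviceVideoCapabilitiesKHR; decode- and codec-specific capability
// structures are chained through pNext. Assuming a valid physicalDevice and a filled-in profileInfo:
//   vk::VideoDecodeH264CapabilitiesKHR h264Caps{};
//   vk::VideoDecodeCapabilitiesKHR decodeCaps{};
//   decodeCaps.pNext = &h264Caps;
//   vk::VideoCapabilitiesKHR caps{};
//   caps.pNext = &decodeCaps;
//   vk::Result result = physicalDevice.getVideoCapabilitiesKHR( &profileInfo, &caps );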
// wrapper struct for struct VkVideoCodingControlInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoCodingControlInfoKHR.html
struct VideoCodingControlInfoKHR
{
using NativeType = VkVideoCodingControlInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoCodingControlInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoCodingControlInfoKHR(VULKAN_HPP_NAMESPACE::VideoCodingControlFlagsKHR flags_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }
{}
VULKAN_HPP_CONSTEXPR VideoCodingControlInfoKHR( VideoCodingControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoCodingControlInfoKHR( VkVideoCodingControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoCodingControlInfoKHR( *reinterpret_cast<VideoCodingControlInfoKHR const *>( &rhs ) )
{}
VideoCodingControlInfoKHR & operator=( VideoCodingControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoCodingControlInfoKHR & operator=( VkVideoCodingControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoCodingControlInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoCodingControlInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoCodingControlFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoCodingControlInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoCodingControlInfoKHR*>( this );
}
operator VkVideoCodingControlInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoCodingControlInfoKHR*>( this );
}
operator VkVideoCodingControlInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoCodingControlInfoKHR*>( this );
}
operator VkVideoCodingControlInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoCodingControlInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::VideoCodingControlFlagsKHR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoCodingControlInfoKHR const & ) const = default;
#else
bool operator==( VideoCodingControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags );
#endif
}
bool operator!=( VideoCodingControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoCodingControlInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::VideoCodingControlFlagsKHR flags = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoCodingControlInfoKHR>
{
using Type = VideoCodingControlInfoKHR;
};
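// Usage sketch (editorial comment): VideoCodingControlInfoKHR is the parameter of
// vkCmdControlVideoCodingKHR, recorded inside a video coding scope; a freshly created video session
// is typically reset before first use:
//   vk::VideoCodingControlInfoKHR controlInfo{ vk::VideoCodingControlFlagBitsKHR::eReset };
//   commandBuffer.controlVideoCodingKHR( controlInfo );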
// wrapper struct for struct VkVideoDecodeAV1CapabilitiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeAV1CapabilitiesKHR.html
struct VideoDecodeAV1CapabilitiesKHR
{
using NativeType = VkVideoDecodeAV1CapabilitiesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeAv1CapabilitiesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoDecodeAV1CapabilitiesKHR(StdVideoAV1Level maxLevel_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, maxLevel{ maxLevel_ }
{}
VULKAN_HPP_CONSTEXPR VideoDecodeAV1CapabilitiesKHR( VideoDecodeAV1CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoDecodeAV1CapabilitiesKHR( VkVideoDecodeAV1CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoDecodeAV1CapabilitiesKHR( *reinterpret_cast<VideoDecodeAV1CapabilitiesKHR const *>( &rhs ) )
{}
VideoDecodeAV1CapabilitiesKHR & operator=( VideoDecodeAV1CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoDecodeAV1CapabilitiesKHR & operator=( VkVideoDecodeAV1CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeAV1CapabilitiesKHR const *>( &rhs );
return *this;
}
operator VkVideoDecodeAV1CapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoDecodeAV1CapabilitiesKHR*>( this );
}
operator VkVideoDecodeAV1CapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoDecodeAV1CapabilitiesKHR*>( this );
}
operator VkVideoDecodeAV1CapabilitiesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoDecodeAV1CapabilitiesKHR*>( this );
}
operator VkVideoDecodeAV1CapabilitiesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoDecodeAV1CapabilitiesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, StdVideoAV1Level const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, maxLevel );
}
#endif
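// Editorial note: the StdVideo* types pulled in from the vk_video std headers are treated as opaque
// by the generator, so the ordering and equality operators below compare them bytewise via memcmp.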
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
std::strong_ordering operator<=>( VideoDecodeAV1CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
if ( auto cmp = memcmp( &maxLevel, &rhs.maxLevel, sizeof( StdVideoAV1Level ) ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
return std::strong_ordering::equivalent;
}
#endif
bool operator==( VideoDecodeAV1CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( memcmp( &maxLevel, &rhs.maxLevel, sizeof( StdVideoAV1Level ) ) == 0 );
}
bool operator!=( VideoDecodeAV1CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeAv1CapabilitiesKHR;
void * pNext = {};
StdVideoAV1Level maxLevel = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoDecodeAv1CapabilitiesKHR>
{
using Type = VideoDecodeAV1CapabilitiesKHR;
};
// wrapper struct for struct VkVideoDecodeAV1DpbSlotInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeAV1DpbSlotInfoKHR.html
struct VideoDecodeAV1DpbSlotInfoKHR
{
using NativeType = VkVideoDecodeAV1DpbSlotInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeAv1DpbSlotInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoDecodeAV1DpbSlotInfoKHR(const StdVideoDecodeAV1ReferenceInfo * pStdReferenceInfo_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pStdReferenceInfo{ pStdReferenceInfo_ }
{}
VULKAN_HPP_CONSTEXPR VideoDecodeAV1DpbSlotInfoKHR( VideoDecodeAV1DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoDecodeAV1DpbSlotInfoKHR( VkVideoDecodeAV1DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoDecodeAV1DpbSlotInfoKHR( *reinterpret_cast<VideoDecodeAV1DpbSlotInfoKHR const *>( &rhs ) )
{}
VideoDecodeAV1DpbSlotInfoKHR & operator=( VideoDecodeAV1DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoDecodeAV1DpbSlotInfoKHR & operator=( VkVideoDecodeAV1DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeAV1DpbSlotInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1DpbSlotInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1DpbSlotInfoKHR & setPStdReferenceInfo( const StdVideoDecodeAV1ReferenceInfo * pStdReferenceInfo_ ) VULKAN_HPP_NOEXCEPT
{
pStdReferenceInfo = pStdReferenceInfo_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoDecodeAV1DpbSlotInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoDecodeAV1DpbSlotInfoKHR*>( this );
}
operator VkVideoDecodeAV1DpbSlotInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoDecodeAV1DpbSlotInfoKHR*>( this );
}
operator VkVideoDecodeAV1DpbSlotInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoDecodeAV1DpbSlotInfoKHR*>( this );
}
operator VkVideoDecodeAV1DpbSlotInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoDecodeAV1DpbSlotInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const StdVideoDecodeAV1ReferenceInfo * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pStdReferenceInfo );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoDecodeAV1DpbSlotInfoKHR const & ) const = default;
#else
bool operator==( VideoDecodeAV1DpbSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pStdReferenceInfo == rhs.pStdReferenceInfo );
#endif
}
bool operator!=( VideoDecodeAV1DpbSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeAv1DpbSlotInfoKHR;
const void * pNext = {};
const StdVideoDecodeAV1ReferenceInfo * pStdReferenceInfo = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoDecodeAv1DpbSlotInfoKHR>
{
using Type = VideoDecodeAV1DpbSlotInfoKHR;
};
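// Usage sketch (editorial comment): codec-specific DPB slot info extends the generic reference slot
// through its pNext chain. Assuming a populated StdVideoDecodeAV1ReferenceInfo named stdRefInfo:
//   vk::VideoDecodeAV1DpbSlotInfoKHR av1Slot{ &stdRefInfo };
//   vk::VideoReferenceSlotInfoKHR slot{};
//   slot.slotIndex = 0;
//   slot.pNext     = &av1Slot;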
// wrapper struct for struct VkVideoDecodeAV1InlineSessionParametersInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeAV1InlineSessionParametersInfoKHR.html
struct VideoDecodeAV1InlineSessionParametersInfoKHR
{
using NativeType = VkVideoDecodeAV1InlineSessionParametersInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeAv1InlineSessionParametersInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoDecodeAV1InlineSessionParametersInfoKHR(const StdVideoAV1SequenceHeader * pStdSequenceHeader_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pStdSequenceHeader{ pStdSequenceHeader_ }
{}
VULKAN_HPP_CONSTEXPR VideoDecodeAV1InlineSessionParametersInfoKHR( VideoDecodeAV1InlineSessionParametersInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoDecodeAV1InlineSessionParametersInfoKHR( VkVideoDecodeAV1InlineSessionParametersInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoDecodeAV1InlineSessionParametersInfoKHR( *reinterpret_cast<VideoDecodeAV1InlineSessionParametersInfoKHR const *>( &rhs ) )
{}
VideoDecodeAV1InlineSessionParametersInfoKHR & operator=( VideoDecodeAV1InlineSessionParametersInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoDecodeAV1InlineSessionParametersInfoKHR & operator=( VkVideoDecodeAV1InlineSessionParametersInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeAV1InlineSessionParametersInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1InlineSessionParametersInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1InlineSessionParametersInfoKHR & setPStdSequenceHeader( const StdVideoAV1SequenceHeader * pStdSequenceHeader_ ) VULKAN_HPP_NOEXCEPT
{
pStdSequenceHeader = pStdSequenceHeader_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoDecodeAV1InlineSessionParametersInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoDecodeAV1InlineSessionParametersInfoKHR*>( this );
}
operator VkVideoDecodeAV1InlineSessionParametersInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoDecodeAV1InlineSessionParametersInfoKHR*>( this );
}
operator VkVideoDecodeAV1InlineSessionParametersInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoDecodeAV1InlineSessionParametersInfoKHR*>( this );
}
operator VkVideoDecodeAV1InlineSessionParametersInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoDecodeAV1InlineSessionParametersInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const StdVideoAV1SequenceHeader * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pStdSequenceHeader );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoDecodeAV1InlineSessionParametersInfoKHR const & ) const = default;
#else
bool operator==( VideoDecodeAV1InlineSessionParametersInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pStdSequenceHeader == rhs.pStdSequenceHeader );
#endif
}
bool operator!=( VideoDecodeAV1InlineSessionParametersInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeAv1InlineSessionParametersInfoKHR;
const void * pNext = {};
const StdVideoAV1SequenceHeader * pStdSequenceHeader = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoDecodeAv1InlineSessionParametersInfoKHR>
{
using Type = VideoDecodeAV1InlineSessionParametersInfoKHR;
};
// wrapper struct for struct VkVideoDecodeAV1PictureInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeAV1PictureInfoKHR.html
struct VideoDecodeAV1PictureInfoKHR
{
using NativeType = VkVideoDecodeAV1PictureInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeAv1PictureInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1PictureInfoKHR(const StdVideoDecodeAV1PictureInfo * pStdPictureInfo_ = {}, std::array<int32_t,VK_MAX_VIDEO_AV1_REFERENCES_PER_FRAME_KHR> const & referenceNameSlotIndices_ = {}, uint32_t frameHeaderOffset_ = {}, uint32_t tileCount_ = {}, const uint32_t * pTileOffsets_ = {}, const uint32_t * pTileSizes_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pStdPictureInfo{ pStdPictureInfo_ }, referenceNameSlotIndices{ referenceNameSlotIndices_ }, frameHeaderOffset{ frameHeaderOffset_ }, tileCount{ tileCount_ }, pTileOffsets{ pTileOffsets_ }, pTileSizes{ pTileSizes_ }
{}
VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1PictureInfoKHR( VideoDecodeAV1PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoDecodeAV1PictureInfoKHR( VkVideoDecodeAV1PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoDecodeAV1PictureInfoKHR( *reinterpret_cast<VideoDecodeAV1PictureInfoKHR const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
VideoDecodeAV1PictureInfoKHR( const StdVideoDecodeAV1PictureInfo * pStdPictureInfo_, std::array<int32_t,VK_MAX_VIDEO_AV1_REFERENCES_PER_FRAME_KHR> const & referenceNameSlotIndices_, uint32_t frameHeaderOffset_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & tileOffsets_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & tileSizes_ = {}, const void * pNext_ = nullptr )
: pNext( pNext_ ), pStdPictureInfo( pStdPictureInfo_ ), referenceNameSlotIndices( referenceNameSlotIndices_ ), frameHeaderOffset( frameHeaderOffset_ ), tileCount( static_cast<uint32_t>( tileOffsets_.size() ) ), pTileOffsets( tileOffsets_.data() ), pTileSizes( tileSizes_.data() )
{
#ifdef VULKAN_HPP_NO_EXCEPTIONS
VULKAN_HPP_ASSERT( tileOffsets_.size() == tileSizes_.size() );
#else
if ( tileOffsets_.size() != tileSizes_.size() )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VideoDecodeAV1PictureInfoKHR::VideoDecodeAV1PictureInfoKHR: tileOffsets_.size() != tileSizes_.size()" );
}
#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VideoDecodeAV1PictureInfoKHR & operator=( VideoDecodeAV1PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoDecodeAV1PictureInfoKHR & operator=( VkVideoDecodeAV1PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeAV1PictureInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1PictureInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1PictureInfoKHR & setPStdPictureInfo( const StdVideoDecodeAV1PictureInfo * pStdPictureInfo_ ) VULKAN_HPP_NOEXCEPT
{
pStdPictureInfo = pStdPictureInfo_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1PictureInfoKHR & setReferenceNameSlotIndices( std::array<int32_t,VK_MAX_VIDEO_AV1_REFERENCES_PER_FRAME_KHR> referenceNameSlotIndices_ ) VULKAN_HPP_NOEXCEPT
{
referenceNameSlotIndices = referenceNameSlotIndices_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1PictureInfoKHR & setFrameHeaderOffset( uint32_t frameHeaderOffset_ ) VULKAN_HPP_NOEXCEPT
{
frameHeaderOffset = frameHeaderOffset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1PictureInfoKHR & setTileCount( uint32_t tileCount_ ) VULKAN_HPP_NOEXCEPT
{
tileCount = tileCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1PictureInfoKHR & setPTileOffsets( const uint32_t * pTileOffsets_ ) VULKAN_HPP_NOEXCEPT
{
pTileOffsets = pTileOffsets_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
VideoDecodeAV1PictureInfoKHR & setTileOffsets( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & tileOffsets_ ) VULKAN_HPP_NOEXCEPT
{
tileCount = static_cast<uint32_t>( tileOffsets_.size() );
pTileOffsets = tileOffsets_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1PictureInfoKHR & setPTileSizes( const uint32_t * pTileSizes_ ) VULKAN_HPP_NOEXCEPT
{
pTileSizes = pTileSizes_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
VideoDecodeAV1PictureInfoKHR & setTileSizes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & tileSizes_ ) VULKAN_HPP_NOEXCEPT
{
tileCount = static_cast<uint32_t>( tileSizes_.size() );
pTileSizes = tileSizes_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoDecodeAV1PictureInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoDecodeAV1PictureInfoKHR*>( this );
}
operator VkVideoDecodeAV1PictureInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoDecodeAV1PictureInfoKHR*>( this );
}
operator VkVideoDecodeAV1PictureInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoDecodeAV1PictureInfoKHR*>( this );
}
operator VkVideoDecodeAV1PictureInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoDecodeAV1PictureInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const StdVideoDecodeAV1PictureInfo * const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<int32_t, VK_MAX_VIDEO_AV1_REFERENCES_PER_FRAME_KHR> const &, uint32_t const &, uint32_t const &, const uint32_t * const &, const uint32_t * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pStdPictureInfo, referenceNameSlotIndices, frameHeaderOffset, tileCount, pTileOffsets, pTileSizes );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoDecodeAV1PictureInfoKHR const & ) const = default;
#else
bool operator==( VideoDecodeAV1PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pStdPictureInfo == rhs.pStdPictureInfo )
&& ( referenceNameSlotIndices == rhs.referenceNameSlotIndices )
&& ( frameHeaderOffset == rhs.frameHeaderOffset )
&& ( tileCount == rhs.tileCount )
&& ( pTileOffsets == rhs.pTileOffsets )
&& ( pTileSizes == rhs.pTileSizes );
#endif
}
bool operator!=( VideoDecodeAV1PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeAv1PictureInfoKHR;
const void * pNext = {};
const StdVideoDecodeAV1PictureInfo * pStdPictureInfo = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<int32_t, VK_MAX_VIDEO_AV1_REFERENCES_PER_FRAME_KHR> referenceNameSlotIndices = {};
uint32_t frameHeaderOffset = {};
uint32_t tileCount = {};
const uint32_t * pTileOffsets = {};
const uint32_t * pTileSizes = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoDecodeAv1PictureInfoKHR>
{
using Type = VideoDecodeAV1PictureInfoKHR;
};
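// Usage sketch (editorial comment): in enhanced mode the ArrayProxy setters keep tileCount in sync
// with the tile arrays; both arrays must be the same length (see the constructor check above).
// Assuming equally sized std::vector<uint32_t> tileOffsets / tileSizes, a populated
// StdVideoDecodeAV1PictureInfo stdPic, and a vk::VideoDecodeInfoKHR decodeInfo:
//   auto pictureInfo = vk::VideoDecodeAV1PictureInfoKHR{}
//                        .setPStdPictureInfo( &stdPic )
//                        .setTileOffsets( tileOffsets )   // also updates tileCount
//                        .setTileSizes( tileSizes );
//   decodeInfo.pNext = &pictureInfo;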
// wrapper struct for struct VkVideoDecodeAV1ProfileInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeAV1ProfileInfoKHR.html
struct VideoDecodeAV1ProfileInfoKHR
{
using NativeType = VkVideoDecodeAV1ProfileInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeAv1ProfileInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoDecodeAV1ProfileInfoKHR(StdVideoAV1Profile stdProfile_ = {}, VULKAN_HPP_NAMESPACE::Bool32 filmGrainSupport_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, stdProfile{ stdProfile_ }, filmGrainSupport{ filmGrainSupport_ }
{}
VULKAN_HPP_CONSTEXPR VideoDecodeAV1ProfileInfoKHR( VideoDecodeAV1ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoDecodeAV1ProfileInfoKHR( VkVideoDecodeAV1ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoDecodeAV1ProfileInfoKHR( *reinterpret_cast<VideoDecodeAV1ProfileInfoKHR const *>( &rhs ) )
{}
VideoDecodeAV1ProfileInfoKHR & operator=( VideoDecodeAV1ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoDecodeAV1ProfileInfoKHR & operator=( VkVideoDecodeAV1ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeAV1ProfileInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1ProfileInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1ProfileInfoKHR & setStdProfile( StdVideoAV1Profile stdProfile_ ) VULKAN_HPP_NOEXCEPT
{
stdProfile = stdProfile_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1ProfileInfoKHR & setFilmGrainSupport( VULKAN_HPP_NAMESPACE::Bool32 filmGrainSupport_ ) VULKAN_HPP_NOEXCEPT
{
filmGrainSupport = filmGrainSupport_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoDecodeAV1ProfileInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoDecodeAV1ProfileInfoKHR*>( this );
}
operator VkVideoDecodeAV1ProfileInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoDecodeAV1ProfileInfoKHR*>( this );
}
operator VkVideoDecodeAV1ProfileInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoDecodeAV1ProfileInfoKHR*>( this );
}
operator VkVideoDecodeAV1ProfileInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoDecodeAV1ProfileInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, StdVideoAV1Profile const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, stdProfile, filmGrainSupport );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
std::strong_ordering operator<=>( VideoDecodeAV1ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
if ( auto cmp = memcmp( &stdProfile, &rhs.stdProfile, sizeof( StdVideoAV1Profile ) ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
if ( auto cmp = filmGrainSupport <=> rhs.filmGrainSupport; cmp != 0 ) return cmp;
return std::strong_ordering::equivalent;
}
#endif
bool operator==( VideoDecodeAV1ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( memcmp( &stdProfile, &rhs.stdProfile, sizeof( StdVideoAV1Profile ) ) == 0 )
&& ( filmGrainSupport == rhs.filmGrainSupport );
}
bool operator!=( VideoDecodeAV1ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeAv1ProfileInfoKHR;
const void * pNext = {};
StdVideoAV1Profile stdProfile = {};
VULKAN_HPP_NAMESPACE::Bool32 filmGrainSupport = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoDecodeAv1ProfileInfoKHR>
{
using Type = VideoDecodeAV1ProfileInfoKHR;
};
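// Usage sketch (editorial comment): the codec profile structure extends vk::VideoProfileInfoKHR when
// describing an AV1 decode profile; assuming Main profile without film grain:
//   vk::VideoDecodeAV1ProfileInfoKHR av1Profile{ STD_VIDEO_AV1_PROFILE_MAIN, VK_FALSE };
//   vk::VideoProfileInfoKHR profileInfo{};
//   profileInfo.videoCodecOperation = vk::VideoCodecOperationFlagBitsKHR::eDecodeAv1;
//   profileInfo.pNext = &av1Profile;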
// wrapper struct for struct VkVideoDecodeAV1SessionParametersCreateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeAV1SessionParametersCreateInfoKHR.html
struct VideoDecodeAV1SessionParametersCreateInfoKHR
{
using NativeType = VkVideoDecodeAV1SessionParametersCreateInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeAv1SessionParametersCreateInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoDecodeAV1SessionParametersCreateInfoKHR(const StdVideoAV1SequenceHeader * pStdSequenceHeader_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pStdSequenceHeader{ pStdSequenceHeader_ }
{}
VULKAN_HPP_CONSTEXPR VideoDecodeAV1SessionParametersCreateInfoKHR( VideoDecodeAV1SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoDecodeAV1SessionParametersCreateInfoKHR( VkVideoDecodeAV1SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoDecodeAV1SessionParametersCreateInfoKHR( *reinterpret_cast<VideoDecodeAV1SessionParametersCreateInfoKHR const *>( &rhs ) )
{}
VideoDecodeAV1SessionParametersCreateInfoKHR & operator=( VideoDecodeAV1SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoDecodeAV1SessionParametersCreateInfoKHR & operator=( VkVideoDecodeAV1SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeAV1SessionParametersCreateInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1SessionParametersCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1SessionParametersCreateInfoKHR & setPStdSequenceHeader( const StdVideoAV1SequenceHeader * pStdSequenceHeader_ ) VULKAN_HPP_NOEXCEPT
{
pStdSequenceHeader = pStdSequenceHeader_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoDecodeAV1SessionParametersCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoDecodeAV1SessionParametersCreateInfoKHR*>( this );
}
operator VkVideoDecodeAV1SessionParametersCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoDecodeAV1SessionParametersCreateInfoKHR*>( this );
}
operator VkVideoDecodeAV1SessionParametersCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoDecodeAV1SessionParametersCreateInfoKHR*>( this );
}
operator VkVideoDecodeAV1SessionParametersCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoDecodeAV1SessionParametersCreateInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const StdVideoAV1SequenceHeader * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pStdSequenceHeader );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoDecodeAV1SessionParametersCreateInfoKHR const & ) const = default;
#else
bool operator==( VideoDecodeAV1SessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pStdSequenceHeader == rhs.pStdSequenceHeader );
#endif
}
bool operator!=( VideoDecodeAV1SessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeAv1SessionParametersCreateInfoKHR;
const void * pNext = {};
const StdVideoAV1SequenceHeader * pStdSequenceHeader = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoDecodeAv1SessionParametersCreateInfoKHR>
{
using Type = VideoDecodeAV1SessionParametersCreateInfoKHR;
};
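// Usage sketch (editorial comment): for AV1 decode, the sequence header is supplied when the session
// parameters object is created, by chaining this structure into vk::VideoSessionParametersCreateInfoKHR.
// Assuming a populated StdVideoAV1SequenceHeader stdSequenceHeader and a valid videoSession:
//   vk::VideoDecodeAV1SessionParametersCreateInfoKHR av1Params{ &stdSequenceHeader };
//   vk::VideoSessionParametersCreateInfoKHR createInfo{};
//   createInfo.videoSession = videoSession;
//   createInfo.pNext        = &av1Params;
//   auto sessionParams = device.createVideoSessionParametersKHR( createInfo );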
// wrapper struct for struct VkVideoDecodeCapabilitiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeCapabilitiesKHR.html
struct VideoDecodeCapabilitiesKHR
{
using NativeType = VkVideoDecodeCapabilitiesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeCapabilitiesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoDecodeCapabilitiesKHR(VULKAN_HPP_NAMESPACE::VideoDecodeCapabilityFlagsKHR flags_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }
{}
VULKAN_HPP_CONSTEXPR VideoDecodeCapabilitiesKHR( VideoDecodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoDecodeCapabilitiesKHR( VkVideoDecodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoDecodeCapabilitiesKHR( *reinterpret_cast<VideoDecodeCapabilitiesKHR const *>( &rhs ) )
{}
VideoDecodeCapabilitiesKHR & operator=( VideoDecodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoDecodeCapabilitiesKHR & operator=( VkVideoDecodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeCapabilitiesKHR const *>( &rhs );
return *this;
}
operator VkVideoDecodeCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoDecodeCapabilitiesKHR*>( this );
}
operator VkVideoDecodeCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoDecodeCapabilitiesKHR*>( this );
}
operator VkVideoDecodeCapabilitiesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoDecodeCapabilitiesKHR*>( this );
}
operator VkVideoDecodeCapabilitiesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoDecodeCapabilitiesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::VideoDecodeCapabilityFlagsKHR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoDecodeCapabilitiesKHR const & ) const = default;
#else
bool operator==( VideoDecodeCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags );
#endif
}
bool operator!=( VideoDecodeCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeCapabilitiesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::VideoDecodeCapabilityFlagsKHR flags = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoDecodeCapabilitiesKHR>
{
using Type = VideoDecodeCapabilitiesKHR;
};
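// Usage sketch (editorial comment): the returned flags tell the application whether the decode output
// picture may coincide with the DPB slot picture or must be a distinct image. Assuming decodeCaps was
// filled in via the pNext chain of a VideoCapabilitiesKHR query:
//   if ( decodeCaps.flags & vk::VideoDecodeCapabilityFlagBitsKHR::eDpbAndOutputCoincide )
//   { /* output image may alias the DPB picture */ }
//   else if ( decodeCaps.flags & vk::VideoDecodeCapabilityFlagBitsKHR::eDpbAndOutputDistinct )
//   { /* a separate output image is required */ }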
// wrapper struct for struct VkVideoDecodeH264CapabilitiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeH264CapabilitiesKHR.html
struct VideoDecodeH264CapabilitiesKHR
{
using NativeType = VkVideoDecodeH264CapabilitiesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264CapabilitiesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoDecodeH264CapabilitiesKHR(StdVideoH264LevelIdc maxLevelIdc_ = {}, VULKAN_HPP_NAMESPACE::Offset2D fieldOffsetGranularity_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, maxLevelIdc{ maxLevelIdc_ }, fieldOffsetGranularity{ fieldOffsetGranularity_ }
{}
VULKAN_HPP_CONSTEXPR VideoDecodeH264CapabilitiesKHR( VideoDecodeH264CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoDecodeH264CapabilitiesKHR( VkVideoDecodeH264CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoDecodeH264CapabilitiesKHR( *reinterpret_cast<VideoDecodeH264CapabilitiesKHR const *>( &rhs ) )
{}
VideoDecodeH264CapabilitiesKHR & operator=( VideoDecodeH264CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoDecodeH264CapabilitiesKHR & operator=( VkVideoDecodeH264CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH264CapabilitiesKHR const *>( &rhs );
return *this;
}
operator VkVideoDecodeH264CapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoDecodeH264CapabilitiesKHR*>( this );
}
operator VkVideoDecodeH264CapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoDecodeH264CapabilitiesKHR*>( this );
}
operator VkVideoDecodeH264CapabilitiesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoDecodeH264CapabilitiesKHR*>( this );
}
operator VkVideoDecodeH264CapabilitiesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoDecodeH264CapabilitiesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, StdVideoH264LevelIdc const &, VULKAN_HPP_NAMESPACE::Offset2D const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, maxLevelIdc, fieldOffsetGranularity );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
std::strong_ordering operator<=>( VideoDecodeH264CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
if ( auto cmp = memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH264LevelIdc ) ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
if ( auto cmp = fieldOffsetGranularity <=> rhs.fieldOffsetGranularity; cmp != 0 ) return cmp;
return std::strong_ordering::equivalent;
}
#endif
bool operator==( VideoDecodeH264CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH264LevelIdc ) ) == 0 )
&& ( fieldOffsetGranularity == rhs.fieldOffsetGranularity );
}
bool operator!=( VideoDecodeH264CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264CapabilitiesKHR;
void * pNext = {};
StdVideoH264LevelIdc maxLevelIdc = {};
VULKAN_HPP_NAMESPACE::Offset2D fieldOffsetGranularity = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoDecodeH264CapabilitiesKHR>
{
using Type = VideoDecodeH264CapabilitiesKHR;
};
// wrapper struct for struct VkVideoDecodeH264DpbSlotInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeH264DpbSlotInfoKHR.html
struct VideoDecodeH264DpbSlotInfoKHR
{
using NativeType = VkVideoDecodeH264DpbSlotInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264DpbSlotInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoDecodeH264DpbSlotInfoKHR(const StdVideoDecodeH264ReferenceInfo * pStdReferenceInfo_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pStdReferenceInfo{ pStdReferenceInfo_ }
{}
VULKAN_HPP_CONSTEXPR VideoDecodeH264DpbSlotInfoKHR( VideoDecodeH264DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoDecodeH264DpbSlotInfoKHR( VkVideoDecodeH264DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoDecodeH264DpbSlotInfoKHR( *reinterpret_cast<VideoDecodeH264DpbSlotInfoKHR const *>( &rhs ) )
{}
VideoDecodeH264DpbSlotInfoKHR & operator=( VideoDecodeH264DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoDecodeH264DpbSlotInfoKHR & operator=( VkVideoDecodeH264DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH264DpbSlotInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264DpbSlotInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264DpbSlotInfoKHR & setPStdReferenceInfo( const StdVideoDecodeH264ReferenceInfo * pStdReferenceInfo_ ) VULKAN_HPP_NOEXCEPT
{
pStdReferenceInfo = pStdReferenceInfo_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoDecodeH264DpbSlotInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoDecodeH264DpbSlotInfoKHR*>( this );
}
operator VkVideoDecodeH264DpbSlotInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoDecodeH264DpbSlotInfoKHR*>( this );
}
operator VkVideoDecodeH264DpbSlotInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoDecodeH264DpbSlotInfoKHR*>( this );
}
operator VkVideoDecodeH264DpbSlotInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoDecodeH264DpbSlotInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const StdVideoDecodeH264ReferenceInfo * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pStdReferenceInfo );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoDecodeH264DpbSlotInfoKHR const & ) const = default;
#else
bool operator==( VideoDecodeH264DpbSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pStdReferenceInfo == rhs.pStdReferenceInfo );
#endif
}
bool operator!=( VideoDecodeH264DpbSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264DpbSlotInfoKHR;
const void * pNext = {};
const StdVideoDecodeH264ReferenceInfo * pStdReferenceInfo = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoDecodeH264DpbSlotInfoKHR>
{
using Type = VideoDecodeH264DpbSlotInfoKHR;
};
// wrapper struct for struct VkVideoDecodeH264InlineSessionParametersInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeH264InlineSessionParametersInfoKHR.html
struct VideoDecodeH264InlineSessionParametersInfoKHR
{
using NativeType = VkVideoDecodeH264InlineSessionParametersInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264InlineSessionParametersInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoDecodeH264InlineSessionParametersInfoKHR(const StdVideoH264SequenceParameterSet * pStdSPS_ = {}, const StdVideoH264PictureParameterSet * pStdPPS_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pStdSPS{ pStdSPS_ }, pStdPPS{ pStdPPS_ }
{}
VULKAN_HPP_CONSTEXPR VideoDecodeH264InlineSessionParametersInfoKHR( VideoDecodeH264InlineSessionParametersInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoDecodeH264InlineSessionParametersInfoKHR( VkVideoDecodeH264InlineSessionParametersInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoDecodeH264InlineSessionParametersInfoKHR( *reinterpret_cast<VideoDecodeH264InlineSessionParametersInfoKHR const *>( &rhs ) )
{}
VideoDecodeH264InlineSessionParametersInfoKHR & operator=( VideoDecodeH264InlineSessionParametersInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoDecodeH264InlineSessionParametersInfoKHR & operator=( VkVideoDecodeH264InlineSessionParametersInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH264InlineSessionParametersInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264InlineSessionParametersInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264InlineSessionParametersInfoKHR & setPStdSPS( const StdVideoH264SequenceParameterSet * pStdSPS_ ) VULKAN_HPP_NOEXCEPT
{
pStdSPS = pStdSPS_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264InlineSessionParametersInfoKHR & setPStdPPS( const StdVideoH264PictureParameterSet * pStdPPS_ ) VULKAN_HPP_NOEXCEPT
{
pStdPPS = pStdPPS_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoDecodeH264InlineSessionParametersInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoDecodeH264InlineSessionParametersInfoKHR*>( this );
}
operator VkVideoDecodeH264InlineSessionParametersInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoDecodeH264InlineSessionParametersInfoKHR*>( this );
}
operator VkVideoDecodeH264InlineSessionParametersInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoDecodeH264InlineSessionParametersInfoKHR*>( this );
}
operator VkVideoDecodeH264InlineSessionParametersInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoDecodeH264InlineSessionParametersInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const StdVideoH264SequenceParameterSet * const &, const StdVideoH264PictureParameterSet * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pStdSPS, pStdPPS );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoDecodeH264InlineSessionParametersInfoKHR const & ) const = default;
#else
bool operator==( VideoDecodeH264InlineSessionParametersInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pStdSPS == rhs.pStdSPS )
&& ( pStdPPS == rhs.pStdPPS );
#endif
}
bool operator!=( VideoDecodeH264InlineSessionParametersInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264InlineSessionParametersInfoKHR;
const void * pNext = {};
const StdVideoH264SequenceParameterSet * pStdSPS = {};
const StdVideoH264PictureParameterSet * pStdPPS = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoDecodeH264InlineSessionParametersInfoKHR>
{
using Type = VideoDecodeH264InlineSessionParametersInfoKHR;
};
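// Usage sketch (editorial comment): with inline session parameters (introduced, to this sketch's
// understanding, by VK_KHR_video_maintenance2), the SPS/PPS can be passed per decode operation instead
// of through a session parameters object. Assuming populated stdSps / stdPps and both structures
// chained into a vk::VideoDecodeInfoKHR:
//   vk::VideoDecodeH264InlineSessionParametersInfoKHR inlineParams{ &stdSps, &stdPps };
//   inlineParams.pNext = &h264PictureInfo;
//   decodeInfo.pNext   = &inlineParams;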
// wrapper struct for struct VkVideoDecodeH264PictureInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeH264PictureInfoKHR.html
struct VideoDecodeH264PictureInfoKHR
{
using NativeType = VkVideoDecodeH264PictureInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264PictureInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoDecodeH264PictureInfoKHR(const StdVideoDecodeH264PictureInfo * pStdPictureInfo_ = {}, uint32_t sliceCount_ = {}, const uint32_t * pSliceOffsets_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pStdPictureInfo{ pStdPictureInfo_ }, sliceCount{ sliceCount_ }, pSliceOffsets{ pSliceOffsets_ }
{}
VULKAN_HPP_CONSTEXPR VideoDecodeH264PictureInfoKHR( VideoDecodeH264PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoDecodeH264PictureInfoKHR( VkVideoDecodeH264PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoDecodeH264PictureInfoKHR( *reinterpret_cast<VideoDecodeH264PictureInfoKHR const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
VideoDecodeH264PictureInfoKHR( const StdVideoDecodeH264PictureInfo * pStdPictureInfo_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & sliceOffsets_, const void * pNext_ = nullptr )
: pNext( pNext_ ), pStdPictureInfo( pStdPictureInfo_ ), sliceCount( static_cast<uint32_t>( sliceOffsets_.size() ) ), pSliceOffsets( sliceOffsets_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VideoDecodeH264PictureInfoKHR & operator=( VideoDecodeH264PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoDecodeH264PictureInfoKHR & operator=( VkVideoDecodeH264PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264PictureInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264PictureInfoKHR & setPStdPictureInfo( const StdVideoDecodeH264PictureInfo * pStdPictureInfo_ ) VULKAN_HPP_NOEXCEPT
{
pStdPictureInfo = pStdPictureInfo_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264PictureInfoKHR & setSliceCount( uint32_t sliceCount_ ) VULKAN_HPP_NOEXCEPT
{
sliceCount = sliceCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264PictureInfoKHR & setPSliceOffsets( const uint32_t * pSliceOffsets_ ) VULKAN_HPP_NOEXCEPT
{
pSliceOffsets = pSliceOffsets_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
VideoDecodeH264PictureInfoKHR & setSliceOffsets( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & sliceOffsets_ ) VULKAN_HPP_NOEXCEPT
{
sliceCount = static_cast<uint32_t>( sliceOffsets_.size() );
pSliceOffsets = sliceOffsets_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoDecodeH264PictureInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoDecodeH264PictureInfoKHR*>( this );
}
operator VkVideoDecodeH264PictureInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoDecodeH264PictureInfoKHR*>( this );
}
operator VkVideoDecodeH264PictureInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoDecodeH264PictureInfoKHR*>( this );
}
operator VkVideoDecodeH264PictureInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoDecodeH264PictureInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const StdVideoDecodeH264PictureInfo * const &, uint32_t const &, const uint32_t * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pStdPictureInfo, sliceCount, pSliceOffsets );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoDecodeH264PictureInfoKHR const & ) const = default;
#else
bool operator==( VideoDecodeH264PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pStdPictureInfo == rhs.pStdPictureInfo )
&& ( sliceCount == rhs.sliceCount )
&& ( pSliceOffsets == rhs.pSliceOffsets );
#endif
}
bool operator!=( VideoDecodeH264PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264PictureInfoKHR;
const void * pNext = {};
const StdVideoDecodeH264PictureInfo * pStdPictureInfo = {};
uint32_t sliceCount = {};
const uint32_t * pSliceOffsets = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoDecodeH264PictureInfoKHR>
{
using Type = VideoDecodeH264PictureInfoKHR;
};
// wrapper struct for struct VkVideoDecodeH264ProfileInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeH264ProfileInfoKHR.html
struct VideoDecodeH264ProfileInfoKHR
{
using NativeType = VkVideoDecodeH264ProfileInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264ProfileInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoDecodeH264ProfileInfoKHR(StdVideoH264ProfileIdc stdProfileIdc_ = {}, VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureLayoutFlagBitsKHR pictureLayout_ = VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureLayoutFlagBitsKHR::eProgressive, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, stdProfileIdc{ stdProfileIdc_ }, pictureLayout{ pictureLayout_ }
{}
VULKAN_HPP_CONSTEXPR VideoDecodeH264ProfileInfoKHR( VideoDecodeH264ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoDecodeH264ProfileInfoKHR( VkVideoDecodeH264ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoDecodeH264ProfileInfoKHR( *reinterpret_cast<VideoDecodeH264ProfileInfoKHR const *>( &rhs ) )
{}
VideoDecodeH264ProfileInfoKHR & operator=( VideoDecodeH264ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoDecodeH264ProfileInfoKHR & operator=( VkVideoDecodeH264ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH264ProfileInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264ProfileInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264ProfileInfoKHR & setStdProfileIdc( StdVideoH264ProfileIdc stdProfileIdc_ ) VULKAN_HPP_NOEXCEPT
{
stdProfileIdc = stdProfileIdc_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264ProfileInfoKHR & setPictureLayout( VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureLayoutFlagBitsKHR pictureLayout_ ) VULKAN_HPP_NOEXCEPT
{
pictureLayout = pictureLayout_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoDecodeH264ProfileInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoDecodeH264ProfileInfoKHR*>( this );
}
operator VkVideoDecodeH264ProfileInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoDecodeH264ProfileInfoKHR*>( this );
}
operator VkVideoDecodeH264ProfileInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoDecodeH264ProfileInfoKHR*>( this );
}
operator VkVideoDecodeH264ProfileInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoDecodeH264ProfileInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, StdVideoH264ProfileIdc const &, VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureLayoutFlagBitsKHR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, stdProfileIdc, pictureLayout );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
std::strong_ordering operator<=>( VideoDecodeH264ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
if ( auto cmp = memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH264ProfileIdc ) ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
if ( auto cmp = pictureLayout <=> rhs.pictureLayout; cmp != 0 ) return cmp;
return std::strong_ordering::equivalent;
}
#endif
bool operator==( VideoDecodeH264ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH264ProfileIdc ) ) == 0 )
&& ( pictureLayout == rhs.pictureLayout );
}
bool operator!=( VideoDecodeH264ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264ProfileInfoKHR;
const void * pNext = {};
StdVideoH264ProfileIdc stdProfileIdc = {};
VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureLayoutFlagBitsKHR pictureLayout = VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureLayoutFlagBitsKHR::eProgressive;
};
template <>
struct CppType<StructureType, StructureType::eVideoDecodeH264ProfileInfoKHR>
{
using Type = VideoDecodeH264ProfileInfoKHR;
};
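// Illustrative usage sketch, not generated from the registry: the setters return *this, so a
// decode profile can be assembled fluently and then hung off the pNext chain of the
// codec-independent VideoProfileInfoKHR, which is how this struct is consumed.
// STD_VIDEO_H264_PROFILE_IDC_HIGH comes from vk_video/vulkan_video_codec_h264std.h.
//
//   VULKAN_HPP_NAMESPACE::VideoDecodeH264ProfileInfoKHR h264Profile =
//     VULKAN_HPP_NAMESPACE::VideoDecodeH264ProfileInfoKHR{}
//       .setStdProfileIdc( STD_VIDEO_H264_PROFILE_IDC_HIGH )
//       .setPictureLayout( VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureLayoutFlagBitsKHR::eProgressive );
//   VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR profileInfo =
//     VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR{}
//       .setVideoCodecOperation( VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR::eDecodeH264 )
//       .setChromaSubsampling( VULKAN_HPP_NAMESPACE::VideoChromaSubsamplingFlagBitsKHR::e420 )
//       .setPNext( &h264Profile );  // luma/chroma bit depths omitted for brevity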
// wrapper struct for struct VkVideoDecodeH264SessionParametersAddInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeH264SessionParametersAddInfoKHR.html
struct VideoDecodeH264SessionParametersAddInfoKHR
{
using NativeType = VkVideoDecodeH264SessionParametersAddInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264SessionParametersAddInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoDecodeH264SessionParametersAddInfoKHR(uint32_t stdSPSCount_ = {}, const StdVideoH264SequenceParameterSet * pStdSPSs_ = {}, uint32_t stdPPSCount_ = {}, const StdVideoH264PictureParameterSet * pStdPPSs_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, stdSPSCount{ stdSPSCount_ }, pStdSPSs{ pStdSPSs_ }, stdPPSCount{ stdPPSCount_ }, pStdPPSs{ pStdPPSs_ }
{}
VULKAN_HPP_CONSTEXPR VideoDecodeH264SessionParametersAddInfoKHR( VideoDecodeH264SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoDecodeH264SessionParametersAddInfoKHR( VkVideoDecodeH264SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoDecodeH264SessionParametersAddInfoKHR( *reinterpret_cast<VideoDecodeH264SessionParametersAddInfoKHR const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
VideoDecodeH264SessionParametersAddInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH264SequenceParameterSet> const & stdSPSs_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH264PictureParameterSet> const & stdPPSs_ = {}, const void * pNext_ = nullptr )
: pNext( pNext_ ), stdSPSCount( static_cast<uint32_t>( stdSPSs_.size() ) ), pStdSPSs( stdSPSs_.data() ), stdPPSCount( static_cast<uint32_t>( stdPPSs_.size() ) ), pStdPPSs( stdPPSs_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VideoDecodeH264SessionParametersAddInfoKHR & operator=( VideoDecodeH264SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoDecodeH264SessionParametersAddInfoKHR & operator=( VkVideoDecodeH264SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoKHR & setStdSPSCount( uint32_t stdSPSCount_ ) VULKAN_HPP_NOEXCEPT
{
stdSPSCount = stdSPSCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoKHR & setPStdSPSs( const StdVideoH264SequenceParameterSet * pStdSPSs_ ) VULKAN_HPP_NOEXCEPT
{
pStdSPSs = pStdSPSs_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
VideoDecodeH264SessionParametersAddInfoKHR & setStdSPSs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH264SequenceParameterSet> const & stdSPSs_ ) VULKAN_HPP_NOEXCEPT
{
stdSPSCount = static_cast<uint32_t>( stdSPSs_.size() );
pStdSPSs = stdSPSs_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoKHR & setStdPPSCount( uint32_t stdPPSCount_ ) VULKAN_HPP_NOEXCEPT
{
stdPPSCount = stdPPSCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoKHR & setPStdPPSs( const StdVideoH264PictureParameterSet * pStdPPSs_ ) VULKAN_HPP_NOEXCEPT
{
pStdPPSs = pStdPPSs_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
VideoDecodeH264SessionParametersAddInfoKHR & setStdPPSs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH264PictureParameterSet> const & stdPPSs_ ) VULKAN_HPP_NOEXCEPT
{
stdPPSCount = static_cast<uint32_t>( stdPPSs_.size() );
pStdPPSs = stdPPSs_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoDecodeH264SessionParametersAddInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoDecodeH264SessionParametersAddInfoKHR*>( this );
}
operator VkVideoDecodeH264SessionParametersAddInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoDecodeH264SessionParametersAddInfoKHR*>( this );
}
operator VkVideoDecodeH264SessionParametersAddInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoDecodeH264SessionParametersAddInfoKHR*>( this );
}
operator VkVideoDecodeH264SessionParametersAddInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoDecodeH264SessionParametersAddInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const StdVideoH264SequenceParameterSet * const &, uint32_t const &, const StdVideoH264PictureParameterSet * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, stdSPSCount, pStdSPSs, stdPPSCount, pStdPPSs );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoDecodeH264SessionParametersAddInfoKHR const & ) const = default;
#else
bool operator==( VideoDecodeH264SessionParametersAddInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( stdSPSCount == rhs.stdSPSCount )
&& ( pStdSPSs == rhs.pStdSPSs )
&& ( stdPPSCount == rhs.stdPPSCount )
&& ( pStdPPSs == rhs.pStdPPSs );
#endif
}
bool operator!=( VideoDecodeH264SessionParametersAddInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264SessionParametersAddInfoKHR;
const void * pNext = {};
uint32_t stdSPSCount = {};
const StdVideoH264SequenceParameterSet * pStdSPSs = {};
uint32_t stdPPSCount = {};
const StdVideoH264PictureParameterSet * pStdPPSs = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoDecodeH264SessionParametersAddInfoKHR>
{
using Type = VideoDecodeH264SessionParametersAddInfoKHR;
};
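// Illustrative usage sketch, not generated from the registry: in enhanced mode the
// ArrayProxyNoTemporaries constructor above derives stdSPSCount/pStdSPSs and
// stdPPSCount/pStdPPSs from lvalue containers in one step. Only pointers are stored, so the
// vectors (hypothetical here, filled by the application's bitstream parser) must outlive
// every use of the struct.
//
//   std::vector<StdVideoH264SequenceParameterSet> spss;  // parsed SPS NAL units
//   std::vector<StdVideoH264PictureParameterSet>  ppss;  // parsed PPS NAL units
//   VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoKHR addInfo( spss, ppss );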
// wrapper struct for struct VkVideoDecodeH264SessionParametersCreateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeH264SessionParametersCreateInfoKHR.html
struct VideoDecodeH264SessionParametersCreateInfoKHR
{
using NativeType = VkVideoDecodeH264SessionParametersCreateInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264SessionParametersCreateInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoDecodeH264SessionParametersCreateInfoKHR(uint32_t maxStdSPSCount_ = {}, uint32_t maxStdPPSCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoKHR * pParametersAddInfo_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, maxStdSPSCount{ maxStdSPSCount_ }, maxStdPPSCount{ maxStdPPSCount_ }, pParametersAddInfo{ pParametersAddInfo_ }
{}
VULKAN_HPP_CONSTEXPR VideoDecodeH264SessionParametersCreateInfoKHR( VideoDecodeH264SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoDecodeH264SessionParametersCreateInfoKHR( VkVideoDecodeH264SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoDecodeH264SessionParametersCreateInfoKHR( *reinterpret_cast<VideoDecodeH264SessionParametersCreateInfoKHR const *>( &rhs ) )
{}
VideoDecodeH264SessionParametersCreateInfoKHR & operator=( VideoDecodeH264SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoDecodeH264SessionParametersCreateInfoKHR & operator=( VkVideoDecodeH264SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersCreateInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersCreateInfoKHR & setMaxStdSPSCount( uint32_t maxStdSPSCount_ ) VULKAN_HPP_NOEXCEPT
{
maxStdSPSCount = maxStdSPSCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersCreateInfoKHR & setMaxStdPPSCount( uint32_t maxStdPPSCount_ ) VULKAN_HPP_NOEXCEPT
{
maxStdPPSCount = maxStdPPSCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersCreateInfoKHR & setPParametersAddInfo( const VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoKHR * pParametersAddInfo_ ) VULKAN_HPP_NOEXCEPT
{
pParametersAddInfo = pParametersAddInfo_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoDecodeH264SessionParametersCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoDecodeH264SessionParametersCreateInfoKHR*>( this );
}
operator VkVideoDecodeH264SessionParametersCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoDecodeH264SessionParametersCreateInfoKHR*>( this );
}
operator VkVideoDecodeH264SessionParametersCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoDecodeH264SessionParametersCreateInfoKHR*>( this );
}
operator VkVideoDecodeH264SessionParametersCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoDecodeH264SessionParametersCreateInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoKHR * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, maxStdSPSCount, maxStdPPSCount, pParametersAddInfo );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoDecodeH264SessionParametersCreateInfoKHR const & ) const = default;
#else
bool operator==( VideoDecodeH264SessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( maxStdSPSCount == rhs.maxStdSPSCount )
&& ( maxStdPPSCount == rhs.maxStdPPSCount )
&& ( pParametersAddInfo == rhs.pParametersAddInfo );
#endif
}
bool operator!=( VideoDecodeH264SessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264SessionParametersCreateInfoKHR;
const void * pNext = {};
uint32_t maxStdSPSCount = {};
uint32_t maxStdPPSCount = {};
const VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoKHR * pParametersAddInfo = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoDecodeH264SessionParametersCreateInfoKHR>
{
using Type = VideoDecodeH264SessionParametersCreateInfoKHR;
};
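// Illustrative usage sketch, not generated from the registry: the create info caps how many
// parameter sets the session parameters object can hold, and pParametersAddInfo optionally
// seeds it at creation time; it is then chained behind the codec-independent
// VideoSessionParametersCreateInfoKHR. `addInfo` is the hypothetical struct built above.
//
//   VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersCreateInfoKHR h264ParamsInfo =
//     VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersCreateInfoKHR{}
//       .setMaxStdSPSCount( 32 )
//       .setMaxStdPPSCount( 256 )
//       .setPParametersAddInfo( &addInfo );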
// wrapper struct for struct VkVideoDecodeH265CapabilitiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeH265CapabilitiesKHR.html
struct VideoDecodeH265CapabilitiesKHR
{
using NativeType = VkVideoDecodeH265CapabilitiesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265CapabilitiesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoDecodeH265CapabilitiesKHR(StdVideoH265LevelIdc maxLevelIdc_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, maxLevelIdc{ maxLevelIdc_ }
{}
VULKAN_HPP_CONSTEXPR VideoDecodeH265CapabilitiesKHR( VideoDecodeH265CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoDecodeH265CapabilitiesKHR( VkVideoDecodeH265CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoDecodeH265CapabilitiesKHR( *reinterpret_cast<VideoDecodeH265CapabilitiesKHR const *>( &rhs ) )
{}
VideoDecodeH265CapabilitiesKHR & operator=( VideoDecodeH265CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoDecodeH265CapabilitiesKHR & operator=( VkVideoDecodeH265CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH265CapabilitiesKHR const *>( &rhs );
return *this;
}
operator VkVideoDecodeH265CapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoDecodeH265CapabilitiesKHR*>( this );
}
operator VkVideoDecodeH265CapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoDecodeH265CapabilitiesKHR*>( this );
}
operator VkVideoDecodeH265CapabilitiesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoDecodeH265CapabilitiesKHR*>( this );
}
operator VkVideoDecodeH265CapabilitiesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoDecodeH265CapabilitiesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, StdVideoH265LevelIdc const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, maxLevelIdc );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
std::strong_ordering operator<=>( VideoDecodeH265CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
if ( auto cmp = memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH265LevelIdc ) ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
return std::strong_ordering::equivalent;
}
#endif
bool operator==( VideoDecodeH265CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH265LevelIdc ) ) == 0 );
}
bool operator!=( VideoDecodeH265CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265CapabilitiesKHR;
void * pNext = {};
StdVideoH265LevelIdc maxLevelIdc = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoDecodeH265CapabilitiesKHR>
{
using Type = VideoDecodeH265CapabilitiesKHR;
};
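// Illustrative usage sketch, not generated from the registry: the conversion operators defined
// above let the wrappers be handed straight to the C entry point
// vkGetPhysicalDeviceVideoCapabilitiesKHR; the H.265 capabilities are chained behind the
// generic VideoCapabilitiesKHR. `physicalDevice` and `profileInfo` are assumed to exist.
//
//   VULKAN_HPP_NAMESPACE::VideoDecodeH265CapabilitiesKHR h265Caps{};
//   VULKAN_HPP_NAMESPACE::VideoDecodeCapabilitiesKHR decodeCaps{};
//   decodeCaps.pNext = &h265Caps;
//   VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR caps{};
//   caps.pNext = &decodeCaps;
//   VkResult result = vkGetPhysicalDeviceVideoCapabilitiesKHR( physicalDevice, profileInfo, caps );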
// wrapper struct for struct VkVideoDecodeH265DpbSlotInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeH265DpbSlotInfoKHR.html
struct VideoDecodeH265DpbSlotInfoKHR
{
using NativeType = VkVideoDecodeH265DpbSlotInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265DpbSlotInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoDecodeH265DpbSlotInfoKHR(const StdVideoDecodeH265ReferenceInfo * pStdReferenceInfo_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pStdReferenceInfo{ pStdReferenceInfo_ }
{}
VULKAN_HPP_CONSTEXPR VideoDecodeH265DpbSlotInfoKHR( VideoDecodeH265DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoDecodeH265DpbSlotInfoKHR( VkVideoDecodeH265DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoDecodeH265DpbSlotInfoKHR( *reinterpret_cast<VideoDecodeH265DpbSlotInfoKHR const *>( &rhs ) )
{}
VideoDecodeH265DpbSlotInfoKHR & operator=( VideoDecodeH265DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoDecodeH265DpbSlotInfoKHR & operator=( VkVideoDecodeH265DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH265DpbSlotInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265DpbSlotInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265DpbSlotInfoKHR & setPStdReferenceInfo( const StdVideoDecodeH265ReferenceInfo * pStdReferenceInfo_ ) VULKAN_HPP_NOEXCEPT
{
pStdReferenceInfo = pStdReferenceInfo_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoDecodeH265DpbSlotInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoDecodeH265DpbSlotInfoKHR*>( this );
}
operator VkVideoDecodeH265DpbSlotInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoDecodeH265DpbSlotInfoKHR*>( this );
}
operator VkVideoDecodeH265DpbSlotInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoDecodeH265DpbSlotInfoKHR*>( this );
}
operator VkVideoDecodeH265DpbSlotInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoDecodeH265DpbSlotInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const StdVideoDecodeH265ReferenceInfo * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pStdReferenceInfo );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoDecodeH265DpbSlotInfoKHR const & ) const = default;
#else
bool operator==( VideoDecodeH265DpbSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pStdReferenceInfo == rhs.pStdReferenceInfo );
#endif
}
bool operator!=( VideoDecodeH265DpbSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265DpbSlotInfoKHR;
const void * pNext = {};
const StdVideoDecodeH265ReferenceInfo * pStdReferenceInfo = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoDecodeH265DpbSlotInfoKHR>
{
using Type = VideoDecodeH265DpbSlotInfoKHR;
};
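// Illustrative usage sketch, not generated from the registry: the codec-specific reference
// metadata is chained behind the codec-independent VideoReferenceSlotInfoKHR for each DPB
// slot; `stdRefInfo` is a hypothetical StdVideoDecodeH265ReferenceInfo filled by the application.
//
//   VULKAN_HPP_NAMESPACE::VideoDecodeH265DpbSlotInfoKHR dpbSlot =
//     VULKAN_HPP_NAMESPACE::VideoDecodeH265DpbSlotInfoKHR{}.setPStdReferenceInfo( &stdRefInfo );
//   VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR refSlot =
//     VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR{}.setSlotIndex( 0 ).setPNext( &dpbSlot );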
// wrapper struct for struct VkVideoDecodeH265InlineSessionParametersInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeH265InlineSessionParametersInfoKHR.html
struct VideoDecodeH265InlineSessionParametersInfoKHR
{
using NativeType = VkVideoDecodeH265InlineSessionParametersInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265InlineSessionParametersInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoDecodeH265InlineSessionParametersInfoKHR(const StdVideoH265VideoParameterSet * pStdVPS_ = {}, const StdVideoH265SequenceParameterSet * pStdSPS_ = {}, const StdVideoH265PictureParameterSet * pStdPPS_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pStdVPS{ pStdVPS_ }, pStdSPS{ pStdSPS_ }, pStdPPS{ pStdPPS_ }
{}
VULKAN_HPP_CONSTEXPR VideoDecodeH265InlineSessionParametersInfoKHR( VideoDecodeH265InlineSessionParametersInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoDecodeH265InlineSessionParametersInfoKHR( VkVideoDecodeH265InlineSessionParametersInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoDecodeH265InlineSessionParametersInfoKHR( *reinterpret_cast<VideoDecodeH265InlineSessionParametersInfoKHR const *>( &rhs ) )
{}
VideoDecodeH265InlineSessionParametersInfoKHR & operator=( VideoDecodeH265InlineSessionParametersInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoDecodeH265InlineSessionParametersInfoKHR & operator=( VkVideoDecodeH265InlineSessionParametersInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH265InlineSessionParametersInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265InlineSessionParametersInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265InlineSessionParametersInfoKHR & setPStdVPS( const StdVideoH265VideoParameterSet * pStdVPS_ ) VULKAN_HPP_NOEXCEPT
{
pStdVPS = pStdVPS_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265InlineSessionParametersInfoKHR & setPStdSPS( const StdVideoH265SequenceParameterSet * pStdSPS_ ) VULKAN_HPP_NOEXCEPT
{
pStdSPS = pStdSPS_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265InlineSessionParametersInfoKHR & setPStdPPS( const StdVideoH265PictureParameterSet * pStdPPS_ ) VULKAN_HPP_NOEXCEPT
{
pStdPPS = pStdPPS_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoDecodeH265InlineSessionParametersInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoDecodeH265InlineSessionParametersInfoKHR*>( this );
}
operator VkVideoDecodeH265InlineSessionParametersInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoDecodeH265InlineSessionParametersInfoKHR*>( this );
}
operator VkVideoDecodeH265InlineSessionParametersInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoDecodeH265InlineSessionParametersInfoKHR*>( this );
}
operator VkVideoDecodeH265InlineSessionParametersInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoDecodeH265InlineSessionParametersInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const StdVideoH265VideoParameterSet * const &, const StdVideoH265SequenceParameterSet * const &, const StdVideoH265PictureParameterSet * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pStdVPS, pStdSPS, pStdPPS );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoDecodeH265InlineSessionParametersInfoKHR const & ) const = default;
#else
bool operator==( VideoDecodeH265InlineSessionParametersInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pStdVPS == rhs.pStdVPS )
&& ( pStdSPS == rhs.pStdSPS )
&& ( pStdPPS == rhs.pStdPPS );
#endif
}
bool operator!=( VideoDecodeH265InlineSessionParametersInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265InlineSessionParametersInfoKHR;
const void * pNext = {};
const StdVideoH265VideoParameterSet * pStdVPS = {};
const StdVideoH265SequenceParameterSet * pStdSPS = {};
const StdVideoH265PictureParameterSet * pStdPPS = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoDecodeH265InlineSessionParametersInfoKHR>
{
using Type = VideoDecodeH265InlineSessionParametersInfoKHR;
};
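// Illustrative usage sketch, not generated from the registry: with inline session parameters
// (VK_KHR_video_maintenance2), the VPS/SPS/PPS are supplied per decode operation by chaining
// this struct behind VideoDecodeInfoKHR instead of storing them in a session parameters
// object; `vps`, `sps` and `pps` are hypothetical Std* structs owned by the application.
//
//   VULKAN_HPP_NAMESPACE::VideoDecodeH265InlineSessionParametersInfoKHR inlineParams =
//     VULKAN_HPP_NAMESPACE::VideoDecodeH265InlineSessionParametersInfoKHR{}
//       .setPStdVPS( &vps ).setPStdSPS( &sps ).setPStdPPS( &pps );
//   // then: decodeInfo.setPNext( &inlineParams );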
// wrapper struct for struct VkVideoDecodeH265PictureInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeH265PictureInfoKHR.html
struct VideoDecodeH265PictureInfoKHR
{
using NativeType = VkVideoDecodeH265PictureInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265PictureInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoDecodeH265PictureInfoKHR(const StdVideoDecodeH265PictureInfo * pStdPictureInfo_ = {}, uint32_t sliceSegmentCount_ = {}, const uint32_t * pSliceSegmentOffsets_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pStdPictureInfo{ pStdPictureInfo_ }, sliceSegmentCount{ sliceSegmentCount_ }, pSliceSegmentOffsets{ pSliceSegmentOffsets_ }
{}
VULKAN_HPP_CONSTEXPR VideoDecodeH265PictureInfoKHR( VideoDecodeH265PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoDecodeH265PictureInfoKHR( VkVideoDecodeH265PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoDecodeH265PictureInfoKHR( *reinterpret_cast<VideoDecodeH265PictureInfoKHR const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
VideoDecodeH265PictureInfoKHR( const StdVideoDecodeH265PictureInfo * pStdPictureInfo_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & sliceSegmentOffsets_, const void * pNext_ = nullptr )
: pNext( pNext_ ), pStdPictureInfo( pStdPictureInfo_ ), sliceSegmentCount( static_cast<uint32_t>( sliceSegmentOffsets_.size() ) ), pSliceSegmentOffsets( sliceSegmentOffsets_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VideoDecodeH265PictureInfoKHR & operator=( VideoDecodeH265PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoDecodeH265PictureInfoKHR & operator=( VkVideoDecodeH265PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH265PictureInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265PictureInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265PictureInfoKHR & setPStdPictureInfo( const StdVideoDecodeH265PictureInfo * pStdPictureInfo_ ) VULKAN_HPP_NOEXCEPT
{
pStdPictureInfo = pStdPictureInfo_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265PictureInfoKHR & setSliceSegmentCount( uint32_t sliceSegmentCount_ ) VULKAN_HPP_NOEXCEPT
{
sliceSegmentCount = sliceSegmentCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265PictureInfoKHR & setPSliceSegmentOffsets( const uint32_t * pSliceSegmentOffsets_ ) VULKAN_HPP_NOEXCEPT
{
pSliceSegmentOffsets = pSliceSegmentOffsets_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
VideoDecodeH265PictureInfoKHR & setSliceSegmentOffsets( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & sliceSegmentOffsets_ ) VULKAN_HPP_NOEXCEPT
{
sliceSegmentCount = static_cast<uint32_t>( sliceSegmentOffsets_.size() );
pSliceSegmentOffsets = sliceSegmentOffsets_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoDecodeH265PictureInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoDecodeH265PictureInfoKHR*>( this );
}
operator VkVideoDecodeH265PictureInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoDecodeH265PictureInfoKHR*>( this );
}
operator VkVideoDecodeH265PictureInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoDecodeH265PictureInfoKHR*>( this );
}
operator VkVideoDecodeH265PictureInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoDecodeH265PictureInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const StdVideoDecodeH265PictureInfo * const &, uint32_t const &, const uint32_t * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pStdPictureInfo, sliceSegmentCount, pSliceSegmentOffsets );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoDecodeH265PictureInfoKHR const & ) const = default;
#else
bool operator==( VideoDecodeH265PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pStdPictureInfo == rhs.pStdPictureInfo )
&& ( sliceSegmentCount == rhs.sliceSegmentCount )
&& ( pSliceSegmentOffsets == rhs.pSliceSegmentOffsets );
#endif
}
bool operator!=( VideoDecodeH265PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265PictureInfoKHR;
const void * pNext = {};
const StdVideoDecodeH265PictureInfo * pStdPictureInfo = {};
uint32_t sliceSegmentCount = {};
const uint32_t * pSliceSegmentOffsets = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoDecodeH265PictureInfoKHR>
{
using Type = VideoDecodeH265PictureInfoKHR;
};
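// Illustrative usage sketch, not generated from the registry: the enhanced-mode constructor
// and setSliceSegmentOffsets keep sliceSegmentCount and pSliceSegmentOffsets in sync
// automatically; `stdPictureInfo` is a hypothetical StdVideoDecodeH265PictureInfo and the
// offsets are byte positions of each slice segment within the source bitstream buffer.
//
//   std::vector<uint32_t> sliceSegmentOffsets = { 0u };  // single slice segment at the buffer start
//   VULKAN_HPP_NAMESPACE::VideoDecodeH265PictureInfoKHR pictureInfo( &stdPictureInfo, sliceSegmentOffsets );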
// wrapper struct for struct VkVideoDecodeH265ProfileInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeH265ProfileInfoKHR.html
struct VideoDecodeH265ProfileInfoKHR
{
using NativeType = VkVideoDecodeH265ProfileInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265ProfileInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoDecodeH265ProfileInfoKHR(StdVideoH265ProfileIdc stdProfileIdc_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, stdProfileIdc{ stdProfileIdc_ }
{}
VULKAN_HPP_CONSTEXPR VideoDecodeH265ProfileInfoKHR( VideoDecodeH265ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoDecodeH265ProfileInfoKHR( VkVideoDecodeH265ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoDecodeH265ProfileInfoKHR( *reinterpret_cast<VideoDecodeH265ProfileInfoKHR const *>( &rhs ) )
{}
VideoDecodeH265ProfileInfoKHR & operator=( VideoDecodeH265ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoDecodeH265ProfileInfoKHR & operator=( VkVideoDecodeH265ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH265ProfileInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265ProfileInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265ProfileInfoKHR & setStdProfileIdc( StdVideoH265ProfileIdc stdProfileIdc_ ) VULKAN_HPP_NOEXCEPT
{
stdProfileIdc = stdProfileIdc_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoDecodeH265ProfileInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoDecodeH265ProfileInfoKHR*>( this );
}
operator VkVideoDecodeH265ProfileInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoDecodeH265ProfileInfoKHR*>( this );
}
operator VkVideoDecodeH265ProfileInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoDecodeH265ProfileInfoKHR*>( this );
}
operator VkVideoDecodeH265ProfileInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoDecodeH265ProfileInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, StdVideoH265ProfileIdc const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, stdProfileIdc );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
std::strong_ordering operator<=>( VideoDecodeH265ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
if ( auto cmp = memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH265ProfileIdc ) ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
return std::strong_ordering::equivalent;
}
#endif
bool operator==( VideoDecodeH265ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH265ProfileIdc ) ) == 0 );
}
bool operator!=( VideoDecodeH265ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265ProfileInfoKHR;
const void * pNext = {};
StdVideoH265ProfileIdc stdProfileIdc = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoDecodeH265ProfileInfoKHR>
{
using Type = VideoDecodeH265ProfileInfoKHR;
};
// wrapper struct for struct VkVideoDecodeH265SessionParametersAddInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeH265SessionParametersAddInfoKHR.html
struct VideoDecodeH265SessionParametersAddInfoKHR
{
using NativeType = VkVideoDecodeH265SessionParametersAddInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265SessionParametersAddInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoDecodeH265SessionParametersAddInfoKHR( uint32_t stdVPSCount_ = {},
                                                                 const StdVideoH265VideoParameterSet * pStdVPSs_ = {},
                                                                 uint32_t stdSPSCount_ = {},
                                                                 const StdVideoH265SequenceParameterSet * pStdSPSs_ = {},
                                                                 uint32_t stdPPSCount_ = {},
                                                                 const StdVideoH265PictureParameterSet * pStdPPSs_ = {},
                                                                 const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, stdVPSCount{ stdVPSCount_ }, pStdVPSs{ pStdVPSs_ }, stdSPSCount{ stdSPSCount_ }, pStdSPSs{ pStdSPSs_ }, stdPPSCount{ stdPPSCount_ }, pStdPPSs{ pStdPPSs_ }
{}
VULKAN_HPP_CONSTEXPR VideoDecodeH265SessionParametersAddInfoKHR( VideoDecodeH265SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoDecodeH265SessionParametersAddInfoKHR( VkVideoDecodeH265SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoDecodeH265SessionParametersAddInfoKHR( *reinterpret_cast<VideoDecodeH265SessionParametersAddInfoKHR const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
VideoDecodeH265SessionParametersAddInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265VideoParameterSet> const & stdVPSs_,
                                            VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265SequenceParameterSet> const & stdSPSs_ = {},
                                            VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265PictureParameterSet> const & stdPPSs_ = {},
                                            const void * pNext_ = nullptr )
: pNext( pNext_ )
, stdVPSCount( static_cast<uint32_t>( stdVPSs_.size() ) )
, pStdVPSs( stdVPSs_.data() )
, stdSPSCount( static_cast<uint32_t>( stdSPSs_.size() ) )
, pStdSPSs( stdSPSs_.data() )
, stdPPSCount( static_cast<uint32_t>( stdPPSs_.size() ) )
, pStdPPSs( stdPPSs_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VideoDecodeH265SessionParametersAddInfoKHR & operator=( VideoDecodeH265SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoDecodeH265SessionParametersAddInfoKHR & operator=( VkVideoDecodeH265SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoKHR & setStdVPSCount( uint32_t stdVPSCount_ ) VULKAN_HPP_NOEXCEPT
{
stdVPSCount = stdVPSCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoKHR & setPStdVPSs( const StdVideoH265VideoParameterSet * pStdVPSs_ ) VULKAN_HPP_NOEXCEPT
{
pStdVPSs = pStdVPSs_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
VideoDecodeH265SessionParametersAddInfoKHR & setStdVPSs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265VideoParameterSet> const & stdVPSs_ ) VULKAN_HPP_NOEXCEPT
{
stdVPSCount = static_cast<uint32_t>( stdVPSs_.size() );
pStdVPSs = stdVPSs_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoKHR & setStdSPSCount( uint32_t stdSPSCount_ ) VULKAN_HPP_NOEXCEPT
{
stdSPSCount = stdSPSCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoKHR & setPStdSPSs( const StdVideoH265SequenceParameterSet * pStdSPSs_ ) VULKAN_HPP_NOEXCEPT
{
pStdSPSs = pStdSPSs_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
VideoDecodeH265SessionParametersAddInfoKHR & setStdSPSs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265SequenceParameterSet> const & stdSPSs_ ) VULKAN_HPP_NOEXCEPT
{
stdSPSCount = static_cast<uint32_t>( stdSPSs_.size() );
pStdSPSs = stdSPSs_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoKHR & setStdPPSCount( uint32_t stdPPSCount_ ) VULKAN_HPP_NOEXCEPT
{
stdPPSCount = stdPPSCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoKHR & setPStdPPSs( const StdVideoH265PictureParameterSet * pStdPPSs_ ) VULKAN_HPP_NOEXCEPT
{
pStdPPSs = pStdPPSs_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
VideoDecodeH265SessionParametersAddInfoKHR & setStdPPSs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265PictureParameterSet> const & stdPPSs_ ) VULKAN_HPP_NOEXCEPT
{
stdPPSCount = static_cast<uint32_t>( stdPPSs_.size() );
pStdPPSs = stdPPSs_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoDecodeH265SessionParametersAddInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoDecodeH265SessionParametersAddInfoKHR*>( this );
}
operator VkVideoDecodeH265SessionParametersAddInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoDecodeH265SessionParametersAddInfoKHR*>( this );
}
operator VkVideoDecodeH265SessionParametersAddInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoDecodeH265SessionParametersAddInfoKHR*>( this );
}
operator VkVideoDecodeH265SessionParametersAddInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoDecodeH265SessionParametersAddInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const StdVideoH265VideoParameterSet * const &, uint32_t const &, const StdVideoH265SequenceParameterSet * const &, uint32_t const &, const StdVideoH265PictureParameterSet * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, stdVPSCount, pStdVPSs, stdSPSCount, pStdSPSs, stdPPSCount, pStdPPSs );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoDecodeH265SessionParametersAddInfoKHR const & ) const = default;
#else
bool operator==( VideoDecodeH265SessionParametersAddInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( stdVPSCount == rhs.stdVPSCount )
&& ( pStdVPSs == rhs.pStdVPSs )
&& ( stdSPSCount == rhs.stdSPSCount )
&& ( pStdSPSs == rhs.pStdSPSs )
&& ( stdPPSCount == rhs.stdPPSCount )
&& ( pStdPPSs == rhs.pStdPPSs );
#endif
}
bool operator!=( VideoDecodeH265SessionParametersAddInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265SessionParametersAddInfoKHR;
const void * pNext = {};
uint32_t stdVPSCount = {};
const StdVideoH265VideoParameterSet * pStdVPSs = {};
uint32_t stdSPSCount = {};
const StdVideoH265SequenceParameterSet * pStdSPSs = {};
uint32_t stdPPSCount = {};
const StdVideoH265PictureParameterSet * pStdPPSs = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoDecodeH265SessionParametersAddInfoKHR>
{
using Type = VideoDecodeH265SessionParametersAddInfoKHR;
};
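// Illustrative usage sketch, not generated from the registry: H.265 adds a VPS level above
// SPS/PPS, and the ArrayProxyNoTemporaries constructor fills all three count/pointer pairs
// at once from lvalue containers (hypothetical vectors below, which must outlive the struct).
//
//   std::vector<StdVideoH265VideoParameterSet>    vpss;  // parsed VPS NAL units
//   std::vector<StdVideoH265SequenceParameterSet> spss;  // parsed SPS NAL units
//   std::vector<StdVideoH265PictureParameterSet>  ppss;  // parsed PPS NAL units
//   VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoKHR addInfo( vpss, spss, ppss );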
// wrapper struct for struct VkVideoDecodeH265SessionParametersCreateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeH265SessionParametersCreateInfoKHR.html
struct VideoDecodeH265SessionParametersCreateInfoKHR
{
using NativeType = VkVideoDecodeH265SessionParametersCreateInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265SessionParametersCreateInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoDecodeH265SessionParametersCreateInfoKHR(uint32_t maxStdVPSCount_ = {}, uint32_t maxStdSPSCount_ = {}, uint32_t maxStdPPSCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoKHR * pParametersAddInfo_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, maxStdVPSCount{ maxStdVPSCount_ }, maxStdSPSCount{ maxStdSPSCount_ }, maxStdPPSCount{ maxStdPPSCount_ }, pParametersAddInfo{ pParametersAddInfo_ }
{}
VULKAN_HPP_CONSTEXPR VideoDecodeH265SessionParametersCreateInfoKHR( VideoDecodeH265SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoDecodeH265SessionParametersCreateInfoKHR( VkVideoDecodeH265SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoDecodeH265SessionParametersCreateInfoKHR( *reinterpret_cast<VideoDecodeH265SessionParametersCreateInfoKHR const *>( &rhs ) )
{}
VideoDecodeH265SessionParametersCreateInfoKHR & operator=( VideoDecodeH265SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoDecodeH265SessionParametersCreateInfoKHR & operator=( VkVideoDecodeH265SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersCreateInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersCreateInfoKHR & setMaxStdVPSCount( uint32_t maxStdVPSCount_ ) VULKAN_HPP_NOEXCEPT
{
maxStdVPSCount = maxStdVPSCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersCreateInfoKHR & setMaxStdSPSCount( uint32_t maxStdSPSCount_ ) VULKAN_HPP_NOEXCEPT
{
maxStdSPSCount = maxStdSPSCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersCreateInfoKHR & setMaxStdPPSCount( uint32_t maxStdPPSCount_ ) VULKAN_HPP_NOEXCEPT
{
maxStdPPSCount = maxStdPPSCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersCreateInfoKHR & setPParametersAddInfo( const VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoKHR * pParametersAddInfo_ ) VULKAN_HPP_NOEXCEPT
{
pParametersAddInfo = pParametersAddInfo_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoDecodeH265SessionParametersCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoDecodeH265SessionParametersCreateInfoKHR*>( this );
}
operator VkVideoDecodeH265SessionParametersCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoDecodeH265SessionParametersCreateInfoKHR*>( this );
}
operator VkVideoDecodeH265SessionParametersCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoDecodeH265SessionParametersCreateInfoKHR*>( this );
}
operator VkVideoDecodeH265SessionParametersCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoDecodeH265SessionParametersCreateInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, uint32_t const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoKHR * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, maxStdVPSCount, maxStdSPSCount, maxStdPPSCount, pParametersAddInfo );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoDecodeH265SessionParametersCreateInfoKHR const & ) const = default;
#else
bool operator==( VideoDecodeH265SessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( maxStdVPSCount == rhs.maxStdVPSCount )
&& ( maxStdSPSCount == rhs.maxStdSPSCount )
&& ( maxStdPPSCount == rhs.maxStdPPSCount )
&& ( pParametersAddInfo == rhs.pParametersAddInfo );
#endif
}
bool operator!=( VideoDecodeH265SessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265SessionParametersCreateInfoKHR;
const void * pNext = {};
uint32_t maxStdVPSCount = {};
uint32_t maxStdSPSCount = {};
uint32_t maxStdPPSCount = {};
const VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoKHR * pParametersAddInfo = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoDecodeH265SessionParametersCreateInfoKHR>
{
using Type = VideoDecodeH265SessionParametersCreateInfoKHR;
};
// wrapper struct for struct VkVideoDecodeInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeInfoKHR.html
struct VideoDecodeInfoKHR
{
using NativeType = VkVideoDecodeInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoDecodeInfoKHR( VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR flags_ = {},
                                         VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ = {},
                                         VULKAN_HPP_NAMESPACE::DeviceSize srcBufferOffset_ = {},
                                         VULKAN_HPP_NAMESPACE::DeviceSize srcBufferRange_ = {},
                                         VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR dstPictureResource_ = {},
                                         const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pSetupReferenceSlot_ = {},
                                         uint32_t referenceSlotCount_ = {},
                                         const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots_ = {},
                                         const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }
, flags{ flags_ }
, srcBuffer{ srcBuffer_ }
, srcBufferOffset{ srcBufferOffset_ }
, srcBufferRange{ srcBufferRange_ }
, dstPictureResource{ dstPictureResource_ }
, pSetupReferenceSlot{ pSetupReferenceSlot_ }
, referenceSlotCount{ referenceSlotCount_ }
, pReferenceSlots{ pReferenceSlots_ }
{}
VULKAN_HPP_CONSTEXPR VideoDecodeInfoKHR( VideoDecodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoDecodeInfoKHR( VkVideoDecodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoDecodeInfoKHR( *reinterpret_cast<VideoDecodeInfoKHR const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
VideoDecodeInfoKHR( VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR flags_,
                    VULKAN_HPP_NAMESPACE::Buffer srcBuffer_,
                    VULKAN_HPP_NAMESPACE::DeviceSize srcBufferOffset_,
                    VULKAN_HPP_NAMESPACE::DeviceSize srcBufferRange_,
                    VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR dstPictureResource_,
                    const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pSetupReferenceSlot_,
                    VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR> const & referenceSlots_,
                    const void * pNext_ = nullptr )
: pNext( pNext_ )
, flags( flags_ )
, srcBuffer( srcBuffer_ )
, srcBufferOffset( srcBufferOffset_ )
, srcBufferRange( srcBufferRange_ )
, dstPictureResource( dstPictureResource_ )
, pSetupReferenceSlot( pSetupReferenceSlot_ )
, referenceSlotCount( static_cast<uint32_t>( referenceSlots_.size() ) )
, pReferenceSlots( referenceSlots_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VideoDecodeInfoKHR & operator=( VideoDecodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoDecodeInfoKHR & operator=( VkVideoDecodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setSrcBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ ) VULKAN_HPP_NOEXCEPT
{
srcBuffer = srcBuffer_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setSrcBufferOffset( VULKAN_HPP_NAMESPACE::DeviceSize srcBufferOffset_ ) VULKAN_HPP_NOEXCEPT
{
srcBufferOffset = srcBufferOffset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setSrcBufferRange( VULKAN_HPP_NAMESPACE::DeviceSize srcBufferRange_ ) VULKAN_HPP_NOEXCEPT
{
srcBufferRange = srcBufferRange_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setDstPictureResource( VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR const & dstPictureResource_ ) VULKAN_HPP_NOEXCEPT
{
dstPictureResource = dstPictureResource_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setPSetupReferenceSlot( const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pSetupReferenceSlot_ ) VULKAN_HPP_NOEXCEPT
{
pSetupReferenceSlot = pSetupReferenceSlot_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setReferenceSlotCount( uint32_t referenceSlotCount_ ) VULKAN_HPP_NOEXCEPT
{
referenceSlotCount = referenceSlotCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setPReferenceSlots( const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots_ ) VULKAN_HPP_NOEXCEPT
{
pReferenceSlots = pReferenceSlots_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
VideoDecodeInfoKHR & setReferenceSlots( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR> const & referenceSlots_ ) VULKAN_HPP_NOEXCEPT
{
referenceSlotCount = static_cast<uint32_t>( referenceSlots_.size() );
pReferenceSlots = referenceSlots_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoDecodeInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoDecodeInfoKHR*>( this );
}
operator VkVideoDecodeInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoDecodeInfoKHR*>( this );
}
operator VkVideoDecodeInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoDecodeInfoKHR*>( this );
}
operator VkVideoDecodeInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoDecodeInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR const &, VULKAN_HPP_NAMESPACE::Buffer const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR const &, const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, srcBuffer, srcBufferOffset, srcBufferRange, dstPictureResource, pSetupReferenceSlot, referenceSlotCount, pReferenceSlots );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoDecodeInfoKHR const & ) const = default;
#else
bool operator==( VideoDecodeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( srcBuffer == rhs.srcBuffer )
&& ( srcBufferOffset == rhs.srcBufferOffset )
&& ( srcBufferRange == rhs.srcBufferRange )
&& ( dstPictureResource == rhs.dstPictureResource )
&& ( pSetupReferenceSlot == rhs.pSetupReferenceSlot )
&& ( referenceSlotCount == rhs.referenceSlotCount )
&& ( pReferenceSlots == rhs.pReferenceSlots );
#endif
}
bool operator!=( VideoDecodeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR flags = {};
VULKAN_HPP_NAMESPACE::Buffer srcBuffer = {};
VULKAN_HPP_NAMESPACE::DeviceSize srcBufferOffset = {};
VULKAN_HPP_NAMESPACE::DeviceSize srcBufferRange = {};
VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR dstPictureResource = {};
const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pSetupReferenceSlot = {};
uint32_t referenceSlotCount = {};
const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoDecodeInfoKHR>
{
using Type = VideoDecodeInfoKHR;
};
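// Illustrative usage sketch, not generated from the registry: a complete decode operation
// ties together the bitstream buffer range, the destination picture, an optional setup slot
// and the active references, with the codec-specific picture info riding on the pNext chain;
// all lower-case names are hypothetical objects created elsewhere.
//
//   VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR decodeInfo =
//     VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR{}
//       .setPNext( &pictureInfo )                  // e.g. VideoDecodeH265PictureInfoKHR
//       .setSrcBuffer( bitstreamBuffer )
//       .setSrcBufferOffset( 0 )
//       .setSrcBufferRange( bitstreamSize )
//       .setDstPictureResource( dstResource )
//       .setPSetupReferenceSlot( &setupSlot )
//       .setReferenceSlots( activeReferenceSlots );  // ArrayProxy keeps count/pointer in sync
//   commandBuffer.decodeVideoKHR( decodeInfo );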
// wrapper struct for struct VkVideoDecodeUsageInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeUsageInfoKHR.html
struct VideoDecodeUsageInfoKHR
{
using NativeType = VkVideoDecodeUsageInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeUsageInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoDecodeUsageInfoKHR(VULKAN_HPP_NAMESPACE::VideoDecodeUsageFlagsKHR videoUsageHints_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, videoUsageHints{ videoUsageHints_ }
{}
VULKAN_HPP_CONSTEXPR VideoDecodeUsageInfoKHR( VideoDecodeUsageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoDecodeUsageInfoKHR( VkVideoDecodeUsageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoDecodeUsageInfoKHR( *reinterpret_cast<VideoDecodeUsageInfoKHR const *>( &rhs ) )
{}
VideoDecodeUsageInfoKHR & operator=( VideoDecodeUsageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoDecodeUsageInfoKHR & operator=( VkVideoDecodeUsageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeUsageInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoDecodeUsageInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoDecodeUsageInfoKHR & setVideoUsageHints( VULKAN_HPP_NAMESPACE::VideoDecodeUsageFlagsKHR videoUsageHints_ ) VULKAN_HPP_NOEXCEPT
{
videoUsageHints = videoUsageHints_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoDecodeUsageInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoDecodeUsageInfoKHR*>( this );
}
operator VkVideoDecodeUsageInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoDecodeUsageInfoKHR*>( this );
}
operator VkVideoDecodeUsageInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoDecodeUsageInfoKHR*>( this );
}
operator VkVideoDecodeUsageInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoDecodeUsageInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::VideoDecodeUsageFlagsKHR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, videoUsageHints );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoDecodeUsageInfoKHR const & ) const = default;
#else
bool operator==( VideoDecodeUsageInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( videoUsageHints == rhs.videoUsageHints );
#endif
}
bool operator!=( VideoDecodeUsageInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeUsageInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::VideoDecodeUsageFlagsKHR videoUsageHints = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoDecodeUsageInfoKHR>
{
using Type = VideoDecodeUsageInfoKHR;
};
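  // Illustrative usage sketch, not part of the generated API: a VideoDecodeUsageInfoKHR is
  // chained into a VideoProfileInfoKHR through pNext to hint at the intended decode use case.
  // The eTranscoding hint shown here is one of the VideoDecodeUsageFlagBitsKHR values.
  //   VULKAN_HPP_NAMESPACE::VideoDecodeUsageInfoKHR usageInfo{};
  //   usageInfo.setVideoUsageHints( VULKAN_HPP_NAMESPACE::VideoDecodeUsageFlagBitsKHR::eTranscoding );
  //   VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR profileInfo{};
  //   profileInfo.setPNext( &usageInfo );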
// wrapper struct for struct VkVideoDecodeVP9CapabilitiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeVP9CapabilitiesKHR.html
struct VideoDecodeVP9CapabilitiesKHR
{
using NativeType = VkVideoDecodeVP9CapabilitiesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeVp9CapabilitiesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoDecodeVP9CapabilitiesKHR(StdVideoVP9Level maxLevel_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, maxLevel{ maxLevel_ }
{}
VULKAN_HPP_CONSTEXPR VideoDecodeVP9CapabilitiesKHR( VideoDecodeVP9CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoDecodeVP9CapabilitiesKHR( VkVideoDecodeVP9CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoDecodeVP9CapabilitiesKHR( *reinterpret_cast<VideoDecodeVP9CapabilitiesKHR const *>( &rhs ) )
{}
VideoDecodeVP9CapabilitiesKHR & operator=( VideoDecodeVP9CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoDecodeVP9CapabilitiesKHR & operator=( VkVideoDecodeVP9CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeVP9CapabilitiesKHR const *>( &rhs );
return *this;
}
operator VkVideoDecodeVP9CapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoDecodeVP9CapabilitiesKHR*>( this );
}
operator VkVideoDecodeVP9CapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoDecodeVP9CapabilitiesKHR*>( this );
}
operator VkVideoDecodeVP9CapabilitiesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoDecodeVP9CapabilitiesKHR*>( this );
}
operator VkVideoDecodeVP9CapabilitiesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoDecodeVP9CapabilitiesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, StdVideoVP9Level const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, maxLevel );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
std::strong_ordering operator<=>( VideoDecodeVP9CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
if ( auto cmp = memcmp( &maxLevel, &rhs.maxLevel, sizeof( StdVideoVP9Level ) ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
return std::strong_ordering::equivalent;
}
#endif
bool operator==( VideoDecodeVP9CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( memcmp( &maxLevel, &rhs.maxLevel, sizeof( StdVideoVP9Level ) ) == 0 );
}
bool operator!=( VideoDecodeVP9CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeVp9CapabilitiesKHR;
void * pNext = {};
StdVideoVP9Level maxLevel = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoDecodeVp9CapabilitiesKHR>
{
using Type = VideoDecodeVP9CapabilitiesKHR;
};
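  // Illustrative usage sketch, not part of the generated API: VP9 decode capabilities are
  // returned by chaining this struct behind VideoDecodeCapabilitiesKHR and VideoCapabilitiesKHR
  // when querying the capabilities of a VP9 decode profile.
  //   VULKAN_HPP_NAMESPACE::VideoDecodeVP9CapabilitiesKHR vp9Caps{};
  //   VULKAN_HPP_NAMESPACE::VideoDecodeCapabilitiesKHR    decodeCaps{};
  //   decodeCaps.pNext = &vp9Caps;
  //   VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR caps{};
  //   caps.pNext = &decodeCaps;   // pass `caps` to the capability query; vp9Caps.maxLevel is then filled in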
// wrapper struct for struct VkVideoDecodeVP9PictureInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeVP9PictureInfoKHR.html
struct VideoDecodeVP9PictureInfoKHR
{
using NativeType = VkVideoDecodeVP9PictureInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeVp9PictureInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 VideoDecodeVP9PictureInfoKHR( const StdVideoDecodeVP9PictureInfo * pStdPictureInfo_ = {},
                                                          std::array<int32_t, VK_MAX_VIDEO_VP9_REFERENCES_PER_FRAME_KHR> const & referenceNameSlotIndices_ = {},
                                                          uint32_t uncompressedHeaderOffset_ = {},
                                                          uint32_t compressedHeaderOffset_ = {},
                                                          uint32_t tilesOffset_ = {},
                                                          const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pStdPictureInfo{ pStdPictureInfo_ }
      , referenceNameSlotIndices{ referenceNameSlotIndices_ }
      , uncompressedHeaderOffset{ uncompressedHeaderOffset_ }
      , compressedHeaderOffset{ compressedHeaderOffset_ }
      , tilesOffset{ tilesOffset_ }
    {}
VULKAN_HPP_CONSTEXPR_14 VideoDecodeVP9PictureInfoKHR( VideoDecodeVP9PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoDecodeVP9PictureInfoKHR( VkVideoDecodeVP9PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoDecodeVP9PictureInfoKHR( *reinterpret_cast<VideoDecodeVP9PictureInfoKHR const *>( &rhs ) )
{}
VideoDecodeVP9PictureInfoKHR & operator=( VideoDecodeVP9PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoDecodeVP9PictureInfoKHR & operator=( VkVideoDecodeVP9PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeVP9PictureInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoDecodeVP9PictureInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoDecodeVP9PictureInfoKHR & setPStdPictureInfo( const StdVideoDecodeVP9PictureInfo * pStdPictureInfo_ ) VULKAN_HPP_NOEXCEPT
{
pStdPictureInfo = pStdPictureInfo_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoDecodeVP9PictureInfoKHR & setReferenceNameSlotIndices( std::array<int32_t,VK_MAX_VIDEO_VP9_REFERENCES_PER_FRAME_KHR> referenceNameSlotIndices_ ) VULKAN_HPP_NOEXCEPT
{
referenceNameSlotIndices = referenceNameSlotIndices_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoDecodeVP9PictureInfoKHR & setUncompressedHeaderOffset( uint32_t uncompressedHeaderOffset_ ) VULKAN_HPP_NOEXCEPT
{
uncompressedHeaderOffset = uncompressedHeaderOffset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoDecodeVP9PictureInfoKHR & setCompressedHeaderOffset( uint32_t compressedHeaderOffset_ ) VULKAN_HPP_NOEXCEPT
{
compressedHeaderOffset = compressedHeaderOffset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoDecodeVP9PictureInfoKHR & setTilesOffset( uint32_t tilesOffset_ ) VULKAN_HPP_NOEXCEPT
{
tilesOffset = tilesOffset_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoDecodeVP9PictureInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoDecodeVP9PictureInfoKHR*>( this );
}
operator VkVideoDecodeVP9PictureInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoDecodeVP9PictureInfoKHR*>( this );
}
operator VkVideoDecodeVP9PictureInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoDecodeVP9PictureInfoKHR*>( this );
}
operator VkVideoDecodeVP9PictureInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoDecodeVP9PictureInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const StdVideoDecodeVP9PictureInfo * const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<int32_t, VK_MAX_VIDEO_VP9_REFERENCES_PER_FRAME_KHR> const &, uint32_t const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pStdPictureInfo, referenceNameSlotIndices, uncompressedHeaderOffset, compressedHeaderOffset, tilesOffset );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoDecodeVP9PictureInfoKHR const & ) const = default;
#else
bool operator==( VideoDecodeVP9PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pStdPictureInfo == rhs.pStdPictureInfo )
&& ( referenceNameSlotIndices == rhs.referenceNameSlotIndices )
&& ( uncompressedHeaderOffset == rhs.uncompressedHeaderOffset )
&& ( compressedHeaderOffset == rhs.compressedHeaderOffset )
&& ( tilesOffset == rhs.tilesOffset );
#endif
}
bool operator!=( VideoDecodeVP9PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeVp9PictureInfoKHR;
const void * pNext = {};
const StdVideoDecodeVP9PictureInfo * pStdPictureInfo = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<int32_t, VK_MAX_VIDEO_VP9_REFERENCES_PER_FRAME_KHR> referenceNameSlotIndices = {};
uint32_t uncompressedHeaderOffset = {};
uint32_t compressedHeaderOffset = {};
uint32_t tilesOffset = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoDecodeVp9PictureInfoKHR>
{
using Type = VideoDecodeVP9PictureInfoKHR;
};
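  // Illustrative usage sketch, not part of the generated API: filling VideoDecodeVP9PictureInfoKHR
  // with the fluent setters before chaining it into a VideoDecodeInfoKHR. `stdPictureInfo` is a
  // hypothetical, application-filled StdVideoDecodeVP9PictureInfo; unused reference names are
  // conventionally set to -1.
  //   std::array<int32_t, VK_MAX_VIDEO_VP9_REFERENCES_PER_FRAME_KHR> slots{};
  //   slots.fill( -1 );
  //   VULKAN_HPP_NAMESPACE::VideoDecodeVP9PictureInfoKHR pictureInfo{};
  //   pictureInfo.setPStdPictureInfo( &stdPictureInfo )
  //              .setReferenceNameSlotIndices( slots )
  //              .setUncompressedHeaderOffset( 0 );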
// wrapper struct for struct VkVideoDecodeVP9ProfileInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeVP9ProfileInfoKHR.html
struct VideoDecodeVP9ProfileInfoKHR
{
using NativeType = VkVideoDecodeVP9ProfileInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeVp9ProfileInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoDecodeVP9ProfileInfoKHR(StdVideoVP9Profile stdProfile_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, stdProfile{ stdProfile_ }
{}
VULKAN_HPP_CONSTEXPR VideoDecodeVP9ProfileInfoKHR( VideoDecodeVP9ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoDecodeVP9ProfileInfoKHR( VkVideoDecodeVP9ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoDecodeVP9ProfileInfoKHR( *reinterpret_cast<VideoDecodeVP9ProfileInfoKHR const *>( &rhs ) )
{}
VideoDecodeVP9ProfileInfoKHR & operator=( VideoDecodeVP9ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoDecodeVP9ProfileInfoKHR & operator=( VkVideoDecodeVP9ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeVP9ProfileInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoDecodeVP9ProfileInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoDecodeVP9ProfileInfoKHR & setStdProfile( StdVideoVP9Profile stdProfile_ ) VULKAN_HPP_NOEXCEPT
{
stdProfile = stdProfile_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoDecodeVP9ProfileInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoDecodeVP9ProfileInfoKHR*>( this );
}
operator VkVideoDecodeVP9ProfileInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoDecodeVP9ProfileInfoKHR*>( this );
}
operator VkVideoDecodeVP9ProfileInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoDecodeVP9ProfileInfoKHR*>( this );
}
operator VkVideoDecodeVP9ProfileInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoDecodeVP9ProfileInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, StdVideoVP9Profile const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, stdProfile );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
std::strong_ordering operator<=>( VideoDecodeVP9ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
if ( auto cmp = memcmp( &stdProfile, &rhs.stdProfile, sizeof( StdVideoVP9Profile ) ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
return std::strong_ordering::equivalent;
}
#endif
bool operator==( VideoDecodeVP9ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( memcmp( &stdProfile, &rhs.stdProfile, sizeof( StdVideoVP9Profile ) ) == 0 );
}
bool operator!=( VideoDecodeVP9ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeVp9ProfileInfoKHR;
const void * pNext = {};
StdVideoVP9Profile stdProfile = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoDecodeVp9ProfileInfoKHR>
{
using Type = VideoDecodeVP9ProfileInfoKHR;
};
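  // Illustrative usage sketch, not part of the generated API: selecting a VP9 decode profile.
  // STD_VIDEO_VP9_PROFILE_0 is assumed to come from the vk_video std headers; the struct is then
  // chained behind a VideoProfileInfoKHR that names a VP9 decode codec operation.
  //   VULKAN_HPP_NAMESPACE::VideoDecodeVP9ProfileInfoKHR vp9Profile{};
  //   vp9Profile.setStdProfile( STD_VIDEO_VP9_PROFILE_0 );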
// wrapper struct for struct VkVideoEncodeAV1CapabilitiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeAV1CapabilitiesKHR.html
struct VideoEncodeAV1CapabilitiesKHR
{
using NativeType = VkVideoEncodeAV1CapabilitiesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeAv1CapabilitiesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoEncodeAV1CapabilitiesKHR( VULKAN_HPP_NAMESPACE::VideoEncodeAV1CapabilityFlagsKHR flags_ = {},
                                                        StdVideoAV1Level maxLevel_ = {},
                                                        VULKAN_HPP_NAMESPACE::Extent2D codedPictureAlignment_ = {},
                                                        VULKAN_HPP_NAMESPACE::Extent2D maxTiles_ = {},
                                                        VULKAN_HPP_NAMESPACE::Extent2D minTileSize_ = {},
                                                        VULKAN_HPP_NAMESPACE::Extent2D maxTileSize_ = {},
                                                        VULKAN_HPP_NAMESPACE::VideoEncodeAV1SuperblockSizeFlagsKHR superblockSizes_ = {},
                                                        uint32_t maxSingleReferenceCount_ = {},
                                                        uint32_t singleReferenceNameMask_ = {},
                                                        uint32_t maxUnidirectionalCompoundReferenceCount_ = {},
                                                        uint32_t maxUnidirectionalCompoundGroup1ReferenceCount_ = {},
                                                        uint32_t unidirectionalCompoundReferenceNameMask_ = {},
                                                        uint32_t maxBidirectionalCompoundReferenceCount_ = {},
                                                        uint32_t maxBidirectionalCompoundGroup1ReferenceCount_ = {},
                                                        uint32_t maxBidirectionalCompoundGroup2ReferenceCount_ = {},
                                                        uint32_t bidirectionalCompoundReferenceNameMask_ = {},
                                                        uint32_t maxTemporalLayerCount_ = {},
                                                        uint32_t maxSpatialLayerCount_ = {},
                                                        uint32_t maxOperatingPoints_ = {},
                                                        uint32_t minQIndex_ = {},
                                                        uint32_t maxQIndex_ = {},
                                                        VULKAN_HPP_NAMESPACE::Bool32 prefersGopRemainingFrames_ = {},
                                                        VULKAN_HPP_NAMESPACE::Bool32 requiresGopRemainingFrames_ = {},
                                                        VULKAN_HPP_NAMESPACE::VideoEncodeAV1StdFlagsKHR stdSyntaxFlags_ = {},
                                                        void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , maxLevel{ maxLevel_ }
      , codedPictureAlignment{ codedPictureAlignment_ }
      , maxTiles{ maxTiles_ }
      , minTileSize{ minTileSize_ }
      , maxTileSize{ maxTileSize_ }
      , superblockSizes{ superblockSizes_ }
      , maxSingleReferenceCount{ maxSingleReferenceCount_ }
      , singleReferenceNameMask{ singleReferenceNameMask_ }
      , maxUnidirectionalCompoundReferenceCount{ maxUnidirectionalCompoundReferenceCount_ }
      , maxUnidirectionalCompoundGroup1ReferenceCount{ maxUnidirectionalCompoundGroup1ReferenceCount_ }
      , unidirectionalCompoundReferenceNameMask{ unidirectionalCompoundReferenceNameMask_ }
      , maxBidirectionalCompoundReferenceCount{ maxBidirectionalCompoundReferenceCount_ }
      , maxBidirectionalCompoundGroup1ReferenceCount{ maxBidirectionalCompoundGroup1ReferenceCount_ }
      , maxBidirectionalCompoundGroup2ReferenceCount{ maxBidirectionalCompoundGroup2ReferenceCount_ }
      , bidirectionalCompoundReferenceNameMask{ bidirectionalCompoundReferenceNameMask_ }
      , maxTemporalLayerCount{ maxTemporalLayerCount_ }
      , maxSpatialLayerCount{ maxSpatialLayerCount_ }
      , maxOperatingPoints{ maxOperatingPoints_ }
      , minQIndex{ minQIndex_ }
      , maxQIndex{ maxQIndex_ }
      , prefersGopRemainingFrames{ prefersGopRemainingFrames_ }
      , requiresGopRemainingFrames{ requiresGopRemainingFrames_ }
      , stdSyntaxFlags{ stdSyntaxFlags_ }
    {}
VULKAN_HPP_CONSTEXPR VideoEncodeAV1CapabilitiesKHR( VideoEncodeAV1CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeAV1CapabilitiesKHR( VkVideoEncodeAV1CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeAV1CapabilitiesKHR( *reinterpret_cast<VideoEncodeAV1CapabilitiesKHR const *>( &rhs ) )
{}
VideoEncodeAV1CapabilitiesKHR & operator=( VideoEncodeAV1CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoEncodeAV1CapabilitiesKHR & operator=( VkVideoEncodeAV1CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeAV1CapabilitiesKHR const *>( &rhs );
return *this;
}
operator VkVideoEncodeAV1CapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeAV1CapabilitiesKHR*>( this );
}
operator VkVideoEncodeAV1CapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeAV1CapabilitiesKHR*>( this );
}
operator VkVideoEncodeAV1CapabilitiesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoEncodeAV1CapabilitiesKHR*>( this );
}
operator VkVideoEncodeAV1CapabilitiesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoEncodeAV1CapabilitiesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::VideoEncodeAV1CapabilityFlagsKHR const &, StdVideoAV1Level const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::VideoEncodeAV1SuperblockSizeFlagsKHR const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::VideoEncodeAV1StdFlagsKHR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, maxLevel, codedPictureAlignment, maxTiles, minTileSize, maxTileSize, superblockSizes, maxSingleReferenceCount, singleReferenceNameMask, maxUnidirectionalCompoundReferenceCount, maxUnidirectionalCompoundGroup1ReferenceCount, unidirectionalCompoundReferenceNameMask, maxBidirectionalCompoundReferenceCount, maxBidirectionalCompoundGroup1ReferenceCount, maxBidirectionalCompoundGroup2ReferenceCount, bidirectionalCompoundReferenceNameMask, maxTemporalLayerCount, maxSpatialLayerCount, maxOperatingPoints, minQIndex, maxQIndex, prefersGopRemainingFrames, requiresGopRemainingFrames, stdSyntaxFlags );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
std::strong_ordering operator<=>( VideoEncodeAV1CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) return cmp;
if ( auto cmp = memcmp( &maxLevel, &rhs.maxLevel, sizeof( StdVideoAV1Level ) ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
if ( auto cmp = codedPictureAlignment <=> rhs.codedPictureAlignment; cmp != 0 ) return cmp;
if ( auto cmp = maxTiles <=> rhs.maxTiles; cmp != 0 ) return cmp;
if ( auto cmp = minTileSize <=> rhs.minTileSize; cmp != 0 ) return cmp;
if ( auto cmp = maxTileSize <=> rhs.maxTileSize; cmp != 0 ) return cmp;
if ( auto cmp = superblockSizes <=> rhs.superblockSizes; cmp != 0 ) return cmp;
if ( auto cmp = maxSingleReferenceCount <=> rhs.maxSingleReferenceCount; cmp != 0 ) return cmp;
if ( auto cmp = singleReferenceNameMask <=> rhs.singleReferenceNameMask; cmp != 0 ) return cmp;
if ( auto cmp = maxUnidirectionalCompoundReferenceCount <=> rhs.maxUnidirectionalCompoundReferenceCount; cmp != 0 ) return cmp;
if ( auto cmp = maxUnidirectionalCompoundGroup1ReferenceCount <=> rhs.maxUnidirectionalCompoundGroup1ReferenceCount; cmp != 0 ) return cmp;
if ( auto cmp = unidirectionalCompoundReferenceNameMask <=> rhs.unidirectionalCompoundReferenceNameMask; cmp != 0 ) return cmp;
if ( auto cmp = maxBidirectionalCompoundReferenceCount <=> rhs.maxBidirectionalCompoundReferenceCount; cmp != 0 ) return cmp;
if ( auto cmp = maxBidirectionalCompoundGroup1ReferenceCount <=> rhs.maxBidirectionalCompoundGroup1ReferenceCount; cmp != 0 ) return cmp;
if ( auto cmp = maxBidirectionalCompoundGroup2ReferenceCount <=> rhs.maxBidirectionalCompoundGroup2ReferenceCount; cmp != 0 ) return cmp;
if ( auto cmp = bidirectionalCompoundReferenceNameMask <=> rhs.bidirectionalCompoundReferenceNameMask; cmp != 0 ) return cmp;
if ( auto cmp = maxTemporalLayerCount <=> rhs.maxTemporalLayerCount; cmp != 0 ) return cmp;
if ( auto cmp = maxSpatialLayerCount <=> rhs.maxSpatialLayerCount; cmp != 0 ) return cmp;
if ( auto cmp = maxOperatingPoints <=> rhs.maxOperatingPoints; cmp != 0 ) return cmp;
if ( auto cmp = minQIndex <=> rhs.minQIndex; cmp != 0 ) return cmp;
if ( auto cmp = maxQIndex <=> rhs.maxQIndex; cmp != 0 ) return cmp;
if ( auto cmp = prefersGopRemainingFrames <=> rhs.prefersGopRemainingFrames; cmp != 0 ) return cmp;
if ( auto cmp = requiresGopRemainingFrames <=> rhs.requiresGopRemainingFrames; cmp != 0 ) return cmp;
if ( auto cmp = stdSyntaxFlags <=> rhs.stdSyntaxFlags; cmp != 0 ) return cmp;
return std::strong_ordering::equivalent;
}
#endif
bool operator==( VideoEncodeAV1CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( memcmp( &maxLevel, &rhs.maxLevel, sizeof( StdVideoAV1Level ) ) == 0 )
&& ( codedPictureAlignment == rhs.codedPictureAlignment )
&& ( maxTiles == rhs.maxTiles )
&& ( minTileSize == rhs.minTileSize )
&& ( maxTileSize == rhs.maxTileSize )
&& ( superblockSizes == rhs.superblockSizes )
&& ( maxSingleReferenceCount == rhs.maxSingleReferenceCount )
&& ( singleReferenceNameMask == rhs.singleReferenceNameMask )
&& ( maxUnidirectionalCompoundReferenceCount == rhs.maxUnidirectionalCompoundReferenceCount )
&& ( maxUnidirectionalCompoundGroup1ReferenceCount == rhs.maxUnidirectionalCompoundGroup1ReferenceCount )
&& ( unidirectionalCompoundReferenceNameMask == rhs.unidirectionalCompoundReferenceNameMask )
&& ( maxBidirectionalCompoundReferenceCount == rhs.maxBidirectionalCompoundReferenceCount )
&& ( maxBidirectionalCompoundGroup1ReferenceCount == rhs.maxBidirectionalCompoundGroup1ReferenceCount )
&& ( maxBidirectionalCompoundGroup2ReferenceCount == rhs.maxBidirectionalCompoundGroup2ReferenceCount )
&& ( bidirectionalCompoundReferenceNameMask == rhs.bidirectionalCompoundReferenceNameMask )
&& ( maxTemporalLayerCount == rhs.maxTemporalLayerCount )
&& ( maxSpatialLayerCount == rhs.maxSpatialLayerCount )
&& ( maxOperatingPoints == rhs.maxOperatingPoints )
&& ( minQIndex == rhs.minQIndex )
&& ( maxQIndex == rhs.maxQIndex )
&& ( prefersGopRemainingFrames == rhs.prefersGopRemainingFrames )
&& ( requiresGopRemainingFrames == rhs.requiresGopRemainingFrames )
&& ( stdSyntaxFlags == rhs.stdSyntaxFlags );
}
bool operator!=( VideoEncodeAV1CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeAv1CapabilitiesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::VideoEncodeAV1CapabilityFlagsKHR flags = {};
StdVideoAV1Level maxLevel = {};
VULKAN_HPP_NAMESPACE::Extent2D codedPictureAlignment = {};
VULKAN_HPP_NAMESPACE::Extent2D maxTiles = {};
VULKAN_HPP_NAMESPACE::Extent2D minTileSize = {};
VULKAN_HPP_NAMESPACE::Extent2D maxTileSize = {};
VULKAN_HPP_NAMESPACE::VideoEncodeAV1SuperblockSizeFlagsKHR superblockSizes = {};
uint32_t maxSingleReferenceCount = {};
uint32_t singleReferenceNameMask = {};
uint32_t maxUnidirectionalCompoundReferenceCount = {};
uint32_t maxUnidirectionalCompoundGroup1ReferenceCount = {};
uint32_t unidirectionalCompoundReferenceNameMask = {};
uint32_t maxBidirectionalCompoundReferenceCount = {};
uint32_t maxBidirectionalCompoundGroup1ReferenceCount = {};
uint32_t maxBidirectionalCompoundGroup2ReferenceCount = {};
uint32_t bidirectionalCompoundReferenceNameMask = {};
uint32_t maxTemporalLayerCount = {};
uint32_t maxSpatialLayerCount = {};
uint32_t maxOperatingPoints = {};
uint32_t minQIndex = {};
uint32_t maxQIndex = {};
VULKAN_HPP_NAMESPACE::Bool32 prefersGopRemainingFrames = {};
VULKAN_HPP_NAMESPACE::Bool32 requiresGopRemainingFrames = {};
VULKAN_HPP_NAMESPACE::VideoEncodeAV1StdFlagsKHR stdSyntaxFlags = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoEncodeAv1CapabilitiesKHR>
{
using Type = VideoEncodeAV1CapabilitiesKHR;
};
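  // Illustrative usage sketch, not part of the generated API: after a capability query has filled
  // this struct (chained behind VideoEncodeCapabilitiesKHR / VideoCapabilitiesKHR), the reported
  // limits bound the encoder configuration. `av1Caps` and `desiredQIndex` are hypothetical;
  // std::min/std::max come from <algorithm>.
  //   uint32_t qIndex = std::min( std::max( desiredQIndex, av1Caps.minQIndex ), av1Caps.maxQIndex );
  //   bool useGopRemaining = ( av1Caps.prefersGopRemainingFrames == VK_TRUE );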
// wrapper struct for struct VkVideoEncodeAV1DpbSlotInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeAV1DpbSlotInfoKHR.html
struct VideoEncodeAV1DpbSlotInfoKHR
{
using NativeType = VkVideoEncodeAV1DpbSlotInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeAv1DpbSlotInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoEncodeAV1DpbSlotInfoKHR(const StdVideoEncodeAV1ReferenceInfo * pStdReferenceInfo_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pStdReferenceInfo{ pStdReferenceInfo_ }
{}
VULKAN_HPP_CONSTEXPR VideoEncodeAV1DpbSlotInfoKHR( VideoEncodeAV1DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeAV1DpbSlotInfoKHR( VkVideoEncodeAV1DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeAV1DpbSlotInfoKHR( *reinterpret_cast<VideoEncodeAV1DpbSlotInfoKHR const *>( &rhs ) )
{}
VideoEncodeAV1DpbSlotInfoKHR & operator=( VideoEncodeAV1DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoEncodeAV1DpbSlotInfoKHR & operator=( VkVideoEncodeAV1DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeAV1DpbSlotInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1DpbSlotInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1DpbSlotInfoKHR & setPStdReferenceInfo( const StdVideoEncodeAV1ReferenceInfo * pStdReferenceInfo_ ) VULKAN_HPP_NOEXCEPT
{
pStdReferenceInfo = pStdReferenceInfo_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoEncodeAV1DpbSlotInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeAV1DpbSlotInfoKHR*>( this );
}
operator VkVideoEncodeAV1DpbSlotInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeAV1DpbSlotInfoKHR*>( this );
}
operator VkVideoEncodeAV1DpbSlotInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoEncodeAV1DpbSlotInfoKHR*>( this );
}
operator VkVideoEncodeAV1DpbSlotInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoEncodeAV1DpbSlotInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const StdVideoEncodeAV1ReferenceInfo * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pStdReferenceInfo );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoEncodeAV1DpbSlotInfoKHR const & ) const = default;
#else
bool operator==( VideoEncodeAV1DpbSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pStdReferenceInfo == rhs.pStdReferenceInfo );
#endif
}
bool operator!=( VideoEncodeAV1DpbSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeAv1DpbSlotInfoKHR;
const void * pNext = {};
const StdVideoEncodeAV1ReferenceInfo * pStdReferenceInfo = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoEncodeAv1DpbSlotInfoKHR>
{
using Type = VideoEncodeAV1DpbSlotInfoKHR;
};
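  // Illustrative usage sketch, not part of the generated API: describing the AV1 reference
  // metadata for a DPB slot. `stdReferenceInfo` is a hypothetical StdVideoEncodeAV1ReferenceInfo
  // filled by the application; the struct is then chained into the matching VideoReferenceSlotInfoKHR.
  //   VULKAN_HPP_NAMESPACE::VideoEncodeAV1DpbSlotInfoKHR dpbSlotInfo{};
  //   dpbSlotInfo.setPStdReferenceInfo( &stdReferenceInfo );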
// wrapper struct for struct VkVideoEncodeAV1FrameSizeKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeAV1FrameSizeKHR.html
struct VideoEncodeAV1FrameSizeKHR
{
using NativeType = VkVideoEncodeAV1FrameSizeKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoEncodeAV1FrameSizeKHR(uint32_t intraFrameSize_ = {}, uint32_t predictiveFrameSize_ = {}, uint32_t bipredictiveFrameSize_ = {}) VULKAN_HPP_NOEXCEPT
: intraFrameSize{ intraFrameSize_ }, predictiveFrameSize{ predictiveFrameSize_ }, bipredictiveFrameSize{ bipredictiveFrameSize_ }
{}
VULKAN_HPP_CONSTEXPR VideoEncodeAV1FrameSizeKHR( VideoEncodeAV1FrameSizeKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeAV1FrameSizeKHR( VkVideoEncodeAV1FrameSizeKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeAV1FrameSizeKHR( *reinterpret_cast<VideoEncodeAV1FrameSizeKHR const *>( &rhs ) )
{}
VideoEncodeAV1FrameSizeKHR & operator=( VideoEncodeAV1FrameSizeKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoEncodeAV1FrameSizeKHR & operator=( VkVideoEncodeAV1FrameSizeKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeAV1FrameSizeKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1FrameSizeKHR & setIntraFrameSize( uint32_t intraFrameSize_ ) VULKAN_HPP_NOEXCEPT
{
intraFrameSize = intraFrameSize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1FrameSizeKHR & setPredictiveFrameSize( uint32_t predictiveFrameSize_ ) VULKAN_HPP_NOEXCEPT
{
predictiveFrameSize = predictiveFrameSize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1FrameSizeKHR & setBipredictiveFrameSize( uint32_t bipredictiveFrameSize_ ) VULKAN_HPP_NOEXCEPT
{
bipredictiveFrameSize = bipredictiveFrameSize_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoEncodeAV1FrameSizeKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeAV1FrameSizeKHR*>( this );
}
operator VkVideoEncodeAV1FrameSizeKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeAV1FrameSizeKHR*>( this );
}
operator VkVideoEncodeAV1FrameSizeKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoEncodeAV1FrameSizeKHR*>( this );
}
operator VkVideoEncodeAV1FrameSizeKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoEncodeAV1FrameSizeKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<uint32_t const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( intraFrameSize, predictiveFrameSize, bipredictiveFrameSize );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoEncodeAV1FrameSizeKHR const & ) const = default;
#else
bool operator==( VideoEncodeAV1FrameSizeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( intraFrameSize == rhs.intraFrameSize )
&& ( predictiveFrameSize == rhs.predictiveFrameSize )
&& ( bipredictiveFrameSize == rhs.bipredictiveFrameSize );
#endif
}
bool operator!=( VideoEncodeAV1FrameSizeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
uint32_t intraFrameSize = {};
uint32_t predictiveFrameSize = {};
uint32_t bipredictiveFrameSize = {};
};
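  // Illustrative usage sketch, not part of the generated API: per-frame-type target sizes in
  // bytes, as consumed by the AV1 rate-control structures. The byte values are arbitrary examples.
  //   VULKAN_HPP_NAMESPACE::VideoEncodeAV1FrameSizeKHR frameSizes( 60000 /*intra*/, 30000 /*predictive*/, 15000 /*bipredictive*/ );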
// wrapper struct for struct VkVideoEncodeAV1GopRemainingFrameInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeAV1GopRemainingFrameInfoKHR.html
struct VideoEncodeAV1GopRemainingFrameInfoKHR
{
using NativeType = VkVideoEncodeAV1GopRemainingFrameInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeAv1GopRemainingFrameInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoEncodeAV1GopRemainingFrameInfoKHR(VULKAN_HPP_NAMESPACE::Bool32 useGopRemainingFrames_ = {}, uint32_t gopRemainingIntra_ = {}, uint32_t gopRemainingPredictive_ = {}, uint32_t gopRemainingBipredictive_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, useGopRemainingFrames{ useGopRemainingFrames_ }, gopRemainingIntra{ gopRemainingIntra_ }, gopRemainingPredictive{ gopRemainingPredictive_ }, gopRemainingBipredictive{ gopRemainingBipredictive_ }
{}
VULKAN_HPP_CONSTEXPR VideoEncodeAV1GopRemainingFrameInfoKHR( VideoEncodeAV1GopRemainingFrameInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeAV1GopRemainingFrameInfoKHR( VkVideoEncodeAV1GopRemainingFrameInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeAV1GopRemainingFrameInfoKHR( *reinterpret_cast<VideoEncodeAV1GopRemainingFrameInfoKHR const *>( &rhs ) )
{}
VideoEncodeAV1GopRemainingFrameInfoKHR & operator=( VideoEncodeAV1GopRemainingFrameInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoEncodeAV1GopRemainingFrameInfoKHR & operator=( VkVideoEncodeAV1GopRemainingFrameInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeAV1GopRemainingFrameInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1GopRemainingFrameInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1GopRemainingFrameInfoKHR & setUseGopRemainingFrames( VULKAN_HPP_NAMESPACE::Bool32 useGopRemainingFrames_ ) VULKAN_HPP_NOEXCEPT
{
useGopRemainingFrames = useGopRemainingFrames_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1GopRemainingFrameInfoKHR & setGopRemainingIntra( uint32_t gopRemainingIntra_ ) VULKAN_HPP_NOEXCEPT
{
gopRemainingIntra = gopRemainingIntra_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1GopRemainingFrameInfoKHR & setGopRemainingPredictive( uint32_t gopRemainingPredictive_ ) VULKAN_HPP_NOEXCEPT
{
gopRemainingPredictive = gopRemainingPredictive_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1GopRemainingFrameInfoKHR & setGopRemainingBipredictive( uint32_t gopRemainingBipredictive_ ) VULKAN_HPP_NOEXCEPT
{
gopRemainingBipredictive = gopRemainingBipredictive_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoEncodeAV1GopRemainingFrameInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeAV1GopRemainingFrameInfoKHR*>( this );
}
operator VkVideoEncodeAV1GopRemainingFrameInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeAV1GopRemainingFrameInfoKHR*>( this );
}
operator VkVideoEncodeAV1GopRemainingFrameInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoEncodeAV1GopRemainingFrameInfoKHR*>( this );
}
operator VkVideoEncodeAV1GopRemainingFrameInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoEncodeAV1GopRemainingFrameInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint32_t const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, useGopRemainingFrames, gopRemainingIntra, gopRemainingPredictive, gopRemainingBipredictive );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoEncodeAV1GopRemainingFrameInfoKHR const & ) const = default;
#else
bool operator==( VideoEncodeAV1GopRemainingFrameInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( useGopRemainingFrames == rhs.useGopRemainingFrames )
&& ( gopRemainingIntra == rhs.gopRemainingIntra )
&& ( gopRemainingPredictive == rhs.gopRemainingPredictive )
&& ( gopRemainingBipredictive == rhs.gopRemainingBipredictive );
#endif
}
bool operator!=( VideoEncodeAV1GopRemainingFrameInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeAv1GopRemainingFrameInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 useGopRemainingFrames = {};
uint32_t gopRemainingIntra = {};
uint32_t gopRemainingPredictive = {};
uint32_t gopRemainingBipredictive = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoEncodeAv1GopRemainingFrameInfoKHR>
{
using Type = VideoEncodeAV1GopRemainingFrameInfoKHR;
};
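  // Illustrative usage sketch, not part of the generated API: reporting the remaining GOP budget
  // (typically chained behind VideoBeginCodingInfoKHR) when the implementation sets
  // requiresGopRemainingFrames in the capabilities. The frame counts are arbitrary examples.
  //   VULKAN_HPP_NAMESPACE::VideoEncodeAV1GopRemainingFrameInfoKHR gopInfo{};
  //   gopInfo.setUseGopRemainingFrames( VK_TRUE )
  //          .setGopRemainingIntra( 1 )
  //          .setGopRemainingPredictive( 7 )
  //          .setGopRemainingBipredictive( 16 );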
// wrapper struct for struct VkVideoEncodeAV1PictureInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeAV1PictureInfoKHR.html
struct VideoEncodeAV1PictureInfoKHR
{
using NativeType = VkVideoEncodeAV1PictureInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeAv1PictureInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1PictureInfoKHR( VULKAN_HPP_NAMESPACE::VideoEncodeAV1PredictionModeKHR predictionMode_ = VULKAN_HPP_NAMESPACE::VideoEncodeAV1PredictionModeKHR::eIntraOnly,
                                                          VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlGroupKHR rateControlGroup_ = VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlGroupKHR::eIntra,
                                                          uint32_t constantQIndex_ = {},
                                                          const StdVideoEncodeAV1PictureInfo * pStdPictureInfo_ = {},
                                                          std::array<int32_t, VK_MAX_VIDEO_AV1_REFERENCES_PER_FRAME_KHR> const & referenceNameSlotIndices_ = {},
                                                          VULKAN_HPP_NAMESPACE::Bool32 primaryReferenceCdfOnly_ = {},
                                                          VULKAN_HPP_NAMESPACE::Bool32 generateObuExtensionHeader_ = {},
                                                          const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , predictionMode{ predictionMode_ }
      , rateControlGroup{ rateControlGroup_ }
      , constantQIndex{ constantQIndex_ }
      , pStdPictureInfo{ pStdPictureInfo_ }
      , referenceNameSlotIndices{ referenceNameSlotIndices_ }
      , primaryReferenceCdfOnly{ primaryReferenceCdfOnly_ }
      , generateObuExtensionHeader{ generateObuExtensionHeader_ }
    {}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1PictureInfoKHR( VideoEncodeAV1PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeAV1PictureInfoKHR( VkVideoEncodeAV1PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeAV1PictureInfoKHR( *reinterpret_cast<VideoEncodeAV1PictureInfoKHR const *>( &rhs ) )
{}
VideoEncodeAV1PictureInfoKHR & operator=( VideoEncodeAV1PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoEncodeAV1PictureInfoKHR & operator=( VkVideoEncodeAV1PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeAV1PictureInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1PictureInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1PictureInfoKHR & setPredictionMode( VULKAN_HPP_NAMESPACE::VideoEncodeAV1PredictionModeKHR predictionMode_ ) VULKAN_HPP_NOEXCEPT
{
predictionMode = predictionMode_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1PictureInfoKHR & setRateControlGroup( VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlGroupKHR rateControlGroup_ ) VULKAN_HPP_NOEXCEPT
{
rateControlGroup = rateControlGroup_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1PictureInfoKHR & setConstantQIndex( uint32_t constantQIndex_ ) VULKAN_HPP_NOEXCEPT
{
constantQIndex = constantQIndex_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1PictureInfoKHR & setPStdPictureInfo( const StdVideoEncodeAV1PictureInfo * pStdPictureInfo_ ) VULKAN_HPP_NOEXCEPT
{
pStdPictureInfo = pStdPictureInfo_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1PictureInfoKHR & setReferenceNameSlotIndices( std::array<int32_t,VK_MAX_VIDEO_AV1_REFERENCES_PER_FRAME_KHR> referenceNameSlotIndices_ ) VULKAN_HPP_NOEXCEPT
{
referenceNameSlotIndices = referenceNameSlotIndices_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1PictureInfoKHR & setPrimaryReferenceCdfOnly( VULKAN_HPP_NAMESPACE::Bool32 primaryReferenceCdfOnly_ ) VULKAN_HPP_NOEXCEPT
{
primaryReferenceCdfOnly = primaryReferenceCdfOnly_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1PictureInfoKHR & setGenerateObuExtensionHeader( VULKAN_HPP_NAMESPACE::Bool32 generateObuExtensionHeader_ ) VULKAN_HPP_NOEXCEPT
{
generateObuExtensionHeader = generateObuExtensionHeader_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoEncodeAV1PictureInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeAV1PictureInfoKHR*>( this );
}
operator VkVideoEncodeAV1PictureInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeAV1PictureInfoKHR*>( this );
}
operator VkVideoEncodeAV1PictureInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoEncodeAV1PictureInfoKHR*>( this );
}
operator VkVideoEncodeAV1PictureInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoEncodeAV1PictureInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::VideoEncodeAV1PredictionModeKHR const &, VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlGroupKHR const &, uint32_t const &, const StdVideoEncodeAV1PictureInfo * const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<int32_t, VK_MAX_VIDEO_AV1_REFERENCES_PER_FRAME_KHR> const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, predictionMode, rateControlGroup, constantQIndex, pStdPictureInfo, referenceNameSlotIndices, primaryReferenceCdfOnly, generateObuExtensionHeader );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoEncodeAV1PictureInfoKHR const & ) const = default;
#else
bool operator==( VideoEncodeAV1PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( predictionMode == rhs.predictionMode )
&& ( rateControlGroup == rhs.rateControlGroup )
&& ( constantQIndex == rhs.constantQIndex )
&& ( pStdPictureInfo == rhs.pStdPictureInfo )
&& ( referenceNameSlotIndices == rhs.referenceNameSlotIndices )
&& ( primaryReferenceCdfOnly == rhs.primaryReferenceCdfOnly )
&& ( generateObuExtensionHeader == rhs.generateObuExtensionHeader );
#endif
}
bool operator!=( VideoEncodeAV1PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeAv1PictureInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::VideoEncodeAV1PredictionModeKHR predictionMode = VULKAN_HPP_NAMESPACE::VideoEncodeAV1PredictionModeKHR::eIntraOnly;
VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlGroupKHR rateControlGroup = VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlGroupKHR::eIntra;
uint32_t constantQIndex = {};
const StdVideoEncodeAV1PictureInfo * pStdPictureInfo = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<int32_t, VK_MAX_VIDEO_AV1_REFERENCES_PER_FRAME_KHR> referenceNameSlotIndices = {};
VULKAN_HPP_NAMESPACE::Bool32 primaryReferenceCdfOnly = {};
VULKAN_HPP_NAMESPACE::Bool32 generateObuExtensionHeader = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoEncodeAv1PictureInfoKHR>
{
using Type = VideoEncodeAV1PictureInfoKHR;
};
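  // Illustrative usage sketch, not part of the generated API: configuring an intra-only AV1
  // encode picture before chaining it into a VideoEncodeInfoKHR. `stdPictureInfo` is a
  // hypothetical, application-filled StdVideoEncodeAV1PictureInfo; unused reference names are
  // conventionally set to -1.
  //   std::array<int32_t, VK_MAX_VIDEO_AV1_REFERENCES_PER_FRAME_KHR> slots{};
  //   slots.fill( -1 );
  //   VULKAN_HPP_NAMESPACE::VideoEncodeAV1PictureInfoKHR pictureInfo{};
  //   pictureInfo.setPredictionMode( VULKAN_HPP_NAMESPACE::VideoEncodeAV1PredictionModeKHR::eIntraOnly )
  //              .setRateControlGroup( VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlGroupKHR::eIntra )
  //              .setPStdPictureInfo( &stdPictureInfo )
  //              .setReferenceNameSlotIndices( slots );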
// wrapper struct for struct VkVideoEncodeAV1ProfileInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeAV1ProfileInfoKHR.html
struct VideoEncodeAV1ProfileInfoKHR
{
using NativeType = VkVideoEncodeAV1ProfileInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeAv1ProfileInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoEncodeAV1ProfileInfoKHR(StdVideoAV1Profile stdProfile_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, stdProfile{ stdProfile_ }
{}
VULKAN_HPP_CONSTEXPR VideoEncodeAV1ProfileInfoKHR( VideoEncodeAV1ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeAV1ProfileInfoKHR( VkVideoEncodeAV1ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeAV1ProfileInfoKHR( *reinterpret_cast<VideoEncodeAV1ProfileInfoKHR const *>( &rhs ) )
{}
VideoEncodeAV1ProfileInfoKHR & operator=( VideoEncodeAV1ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoEncodeAV1ProfileInfoKHR & operator=( VkVideoEncodeAV1ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeAV1ProfileInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1ProfileInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1ProfileInfoKHR & setStdProfile( StdVideoAV1Profile stdProfile_ ) VULKAN_HPP_NOEXCEPT
{
stdProfile = stdProfile_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoEncodeAV1ProfileInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeAV1ProfileInfoKHR*>( this );
}
operator VkVideoEncodeAV1ProfileInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeAV1ProfileInfoKHR*>( this );
}
operator VkVideoEncodeAV1ProfileInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoEncodeAV1ProfileInfoKHR*>( this );
}
operator VkVideoEncodeAV1ProfileInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoEncodeAV1ProfileInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, StdVideoAV1Profile const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, stdProfile );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
std::strong_ordering operator<=>( VideoEncodeAV1ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
if ( auto cmp = memcmp( &stdProfile, &rhs.stdProfile, sizeof( StdVideoAV1Profile ) ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
return std::strong_ordering::equivalent;
}
#endif
bool operator==( VideoEncodeAV1ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( memcmp( &stdProfile, &rhs.stdProfile, sizeof( StdVideoAV1Profile ) ) == 0 );
}
bool operator!=( VideoEncodeAV1ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeAv1ProfileInfoKHR;
const void * pNext = {};
StdVideoAV1Profile stdProfile = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoEncodeAv1ProfileInfoKHR>
{
using Type = VideoEncodeAV1ProfileInfoKHR;
};
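  // Illustrative usage sketch, not part of the generated API: selecting the AV1 Main profile for
  // encoding. STD_VIDEO_AV1_PROFILE_MAIN is assumed to come from the vk_video std headers.
  //   VULKAN_HPP_NAMESPACE::VideoEncodeAV1ProfileInfoKHR av1Profile{};
  //   av1Profile.setStdProfile( STD_VIDEO_AV1_PROFILE_MAIN );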
// wrapper struct for struct VkVideoEncodeAV1QIndexKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeAV1QIndexKHR.html
struct VideoEncodeAV1QIndexKHR
{
using NativeType = VkVideoEncodeAV1QIndexKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoEncodeAV1QIndexKHR(uint32_t intraQIndex_ = {}, uint32_t predictiveQIndex_ = {}, uint32_t bipredictiveQIndex_ = {}) VULKAN_HPP_NOEXCEPT
: intraQIndex{ intraQIndex_ }, predictiveQIndex{ predictiveQIndex_ }, bipredictiveQIndex{ bipredictiveQIndex_ }
{}
VULKAN_HPP_CONSTEXPR VideoEncodeAV1QIndexKHR( VideoEncodeAV1QIndexKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeAV1QIndexKHR( VkVideoEncodeAV1QIndexKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeAV1QIndexKHR( *reinterpret_cast<VideoEncodeAV1QIndexKHR const *>( &rhs ) )
{}
VideoEncodeAV1QIndexKHR & operator=( VideoEncodeAV1QIndexKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoEncodeAV1QIndexKHR & operator=( VkVideoEncodeAV1QIndexKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeAV1QIndexKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1QIndexKHR & setIntraQIndex( uint32_t intraQIndex_ ) VULKAN_HPP_NOEXCEPT
{
intraQIndex = intraQIndex_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1QIndexKHR & setPredictiveQIndex( uint32_t predictiveQIndex_ ) VULKAN_HPP_NOEXCEPT
{
predictiveQIndex = predictiveQIndex_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1QIndexKHR & setBipredictiveQIndex( uint32_t bipredictiveQIndex_ ) VULKAN_HPP_NOEXCEPT
{
bipredictiveQIndex = bipredictiveQIndex_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoEncodeAV1QIndexKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeAV1QIndexKHR*>( this );
}
operator VkVideoEncodeAV1QIndexKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeAV1QIndexKHR*>( this );
}
operator VkVideoEncodeAV1QIndexKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoEncodeAV1QIndexKHR*>( this );
}
operator VkVideoEncodeAV1QIndexKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoEncodeAV1QIndexKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<uint32_t const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( intraQIndex, predictiveQIndex, bipredictiveQIndex );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoEncodeAV1QIndexKHR const & ) const = default;
#else
bool operator==( VideoEncodeAV1QIndexKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( intraQIndex == rhs.intraQIndex )
&& ( predictiveQIndex == rhs.predictiveQIndex )
&& ( bipredictiveQIndex == rhs.bipredictiveQIndex );
#endif
}
bool operator!=( VideoEncodeAV1QIndexKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
uint32_t intraQIndex = {};
uint32_t predictiveQIndex = {};
uint32_t bipredictiveQIndex = {};
};
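  // Illustrative usage sketch, not part of the generated API: constant quantizer indices per
  // frame type (lower generally means higher quality). The values are arbitrary examples and
  // must stay within the minQIndex/maxQIndex limits reported in VideoEncodeAV1CapabilitiesKHR.
  //   VULKAN_HPP_NAMESPACE::VideoEncodeAV1QIndexKHR qIndices( 80 /*intra*/, 110 /*predictive*/, 128 /*bipredictive*/ );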
// wrapper struct for struct VkVideoEncodeAV1QualityLevelPropertiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeAV1QualityLevelPropertiesKHR.html
struct VideoEncodeAV1QualityLevelPropertiesKHR
{
using NativeType = VkVideoEncodeAV1QualityLevelPropertiesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeAv1QualityLevelPropertiesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoEncodeAV1QualityLevelPropertiesKHR( VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlFlagsKHR preferredRateControlFlags_ = {},
                                                                  uint32_t preferredGopFrameCount_ = {},
                                                                  uint32_t preferredKeyFramePeriod_ = {},
                                                                  uint32_t preferredConsecutiveBipredictiveFrameCount_ = {},
                                                                  uint32_t preferredTemporalLayerCount_ = {},
                                                                  VULKAN_HPP_NAMESPACE::VideoEncodeAV1QIndexKHR preferredConstantQIndex_ = {},
                                                                  uint32_t preferredMaxSingleReferenceCount_ = {},
                                                                  uint32_t preferredSingleReferenceNameMask_ = {},
                                                                  uint32_t preferredMaxUnidirectionalCompoundReferenceCount_ = {},
                                                                  uint32_t preferredMaxUnidirectionalCompoundGroup1ReferenceCount_ = {},
                                                                  uint32_t preferredUnidirectionalCompoundReferenceNameMask_ = {},
                                                                  uint32_t preferredMaxBidirectionalCompoundReferenceCount_ = {},
                                                                  uint32_t preferredMaxBidirectionalCompoundGroup1ReferenceCount_ = {},
                                                                  uint32_t preferredMaxBidirectionalCompoundGroup2ReferenceCount_ = {},
                                                                  uint32_t preferredBidirectionalCompoundReferenceNameMask_ = {},
                                                                  void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , preferredRateControlFlags{ preferredRateControlFlags_ }
      , preferredGopFrameCount{ preferredGopFrameCount_ }
      , preferredKeyFramePeriod{ preferredKeyFramePeriod_ }
      , preferredConsecutiveBipredictiveFrameCount{ preferredConsecutiveBipredictiveFrameCount_ }
      , preferredTemporalLayerCount{ preferredTemporalLayerCount_ }
      , preferredConstantQIndex{ preferredConstantQIndex_ }
      , preferredMaxSingleReferenceCount{ preferredMaxSingleReferenceCount_ }
      , preferredSingleReferenceNameMask{ preferredSingleReferenceNameMask_ }
      , preferredMaxUnidirectionalCompoundReferenceCount{ preferredMaxUnidirectionalCompoundReferenceCount_ }
      , preferredMaxUnidirectionalCompoundGroup1ReferenceCount{ preferredMaxUnidirectionalCompoundGroup1ReferenceCount_ }
      , preferredUnidirectionalCompoundReferenceNameMask{ preferredUnidirectionalCompoundReferenceNameMask_ }
      , preferredMaxBidirectionalCompoundReferenceCount{ preferredMaxBidirectionalCompoundReferenceCount_ }
      , preferredMaxBidirectionalCompoundGroup1ReferenceCount{ preferredMaxBidirectionalCompoundGroup1ReferenceCount_ }
      , preferredMaxBidirectionalCompoundGroup2ReferenceCount{ preferredMaxBidirectionalCompoundGroup2ReferenceCount_ }
      , preferredBidirectionalCompoundReferenceNameMask{ preferredBidirectionalCompoundReferenceNameMask_ }
    {}
VULKAN_HPP_CONSTEXPR VideoEncodeAV1QualityLevelPropertiesKHR( VideoEncodeAV1QualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeAV1QualityLevelPropertiesKHR( VkVideoEncodeAV1QualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeAV1QualityLevelPropertiesKHR( *reinterpret_cast<VideoEncodeAV1QualityLevelPropertiesKHR const *>( &rhs ) )
{}
VideoEncodeAV1QualityLevelPropertiesKHR & operator=( VideoEncodeAV1QualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoEncodeAV1QualityLevelPropertiesKHR & operator=( VkVideoEncodeAV1QualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeAV1QualityLevelPropertiesKHR const *>( &rhs );
return *this;
}
operator VkVideoEncodeAV1QualityLevelPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeAV1QualityLevelPropertiesKHR*>( this );
}
operator VkVideoEncodeAV1QualityLevelPropertiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeAV1QualityLevelPropertiesKHR*>( this );
}
operator VkVideoEncodeAV1QualityLevelPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoEncodeAV1QualityLevelPropertiesKHR*>( this );
}
operator VkVideoEncodeAV1QualityLevelPropertiesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoEncodeAV1QualityLevelPropertiesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlFlagsKHR const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::VideoEncodeAV1QIndexKHR const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, preferredRateControlFlags, preferredGopFrameCount, preferredKeyFramePeriod, preferredConsecutiveBipredictiveFrameCount, preferredTemporalLayerCount, preferredConstantQIndex, preferredMaxSingleReferenceCount, preferredSingleReferenceNameMask, preferredMaxUnidirectionalCompoundReferenceCount, preferredMaxUnidirectionalCompoundGroup1ReferenceCount, preferredUnidirectionalCompoundReferenceNameMask, preferredMaxBidirectionalCompoundReferenceCount, preferredMaxBidirectionalCompoundGroup1ReferenceCount, preferredMaxBidirectionalCompoundGroup2ReferenceCount, preferredBidirectionalCompoundReferenceNameMask );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoEncodeAV1QualityLevelPropertiesKHR const & ) const = default;
#else
bool operator==( VideoEncodeAV1QualityLevelPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( preferredRateControlFlags == rhs.preferredRateControlFlags )
&& ( preferredGopFrameCount == rhs.preferredGopFrameCount )
&& ( preferredKeyFramePeriod == rhs.preferredKeyFramePeriod )
&& ( preferredConsecutiveBipredictiveFrameCount == rhs.preferredConsecutiveBipredictiveFrameCount )
&& ( preferredTemporalLayerCount == rhs.preferredTemporalLayerCount )
&& ( preferredConstantQIndex == rhs.preferredConstantQIndex )
&& ( preferredMaxSingleReferenceCount == rhs.preferredMaxSingleReferenceCount )
&& ( preferredSingleReferenceNameMask == rhs.preferredSingleReferenceNameMask )
&& ( preferredMaxUnidirectionalCompoundReferenceCount == rhs.preferredMaxUnidirectionalCompoundReferenceCount )
&& ( preferredMaxUnidirectionalCompoundGroup1ReferenceCount == rhs.preferredMaxUnidirectionalCompoundGroup1ReferenceCount )
&& ( preferredUnidirectionalCompoundReferenceNameMask == rhs.preferredUnidirectionalCompoundReferenceNameMask )
&& ( preferredMaxBidirectionalCompoundReferenceCount == rhs.preferredMaxBidirectionalCompoundReferenceCount )
&& ( preferredMaxBidirectionalCompoundGroup1ReferenceCount == rhs.preferredMaxBidirectionalCompoundGroup1ReferenceCount )
&& ( preferredMaxBidirectionalCompoundGroup2ReferenceCount == rhs.preferredMaxBidirectionalCompoundGroup2ReferenceCount )
&& ( preferredBidirectionalCompoundReferenceNameMask == rhs.preferredBidirectionalCompoundReferenceNameMask );
#endif
}
bool operator!=( VideoEncodeAV1QualityLevelPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeAv1QualityLevelPropertiesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlFlagsKHR preferredRateControlFlags = {};
uint32_t preferredGopFrameCount = {};
uint32_t preferredKeyFramePeriod = {};
uint32_t preferredConsecutiveBipredictiveFrameCount = {};
uint32_t preferredTemporalLayerCount = {};
VULKAN_HPP_NAMESPACE::VideoEncodeAV1QIndexKHR preferredConstantQIndex = {};
uint32_t preferredMaxSingleReferenceCount = {};
uint32_t preferredSingleReferenceNameMask = {};
uint32_t preferredMaxUnidirectionalCompoundReferenceCount = {};
uint32_t preferredMaxUnidirectionalCompoundGroup1ReferenceCount = {};
uint32_t preferredUnidirectionalCompoundReferenceNameMask = {};
uint32_t preferredMaxBidirectionalCompoundReferenceCount = {};
uint32_t preferredMaxBidirectionalCompoundGroup1ReferenceCount = {};
uint32_t preferredMaxBidirectionalCompoundGroup2ReferenceCount = {};
uint32_t preferredBidirectionalCompoundReferenceNameMask = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoEncodeAv1QualityLevelPropertiesKHR>
{
using Type = VideoEncodeAV1QualityLevelPropertiesKHR;
};
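// Usage sketch (illustrative, not generator output): this read-only properties
// struct is typically obtained by chaining it behind
// VideoEncodeQualityLevelPropertiesKHR in the quality level query, assuming the
// enhanced-mode structure-chain overload; `physicalDevice` and `qualityLevelInfo`
// are assumed application-side objects.
//
//   auto chain = physicalDevice.getVideoEncodeQualityLevelPropertiesKHR<
//       VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR,
//       VULKAN_HPP_NAMESPACE::VideoEncodeAV1QualityLevelPropertiesKHR>( qualityLevelInfo );
//   auto const & av1Props = chain.get<VULKAN_HPP_NAMESPACE::VideoEncodeAV1QualityLevelPropertiesKHR>();
//   uint32_t gop = av1Props.preferredGopFrameCount;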
// wrapper struct for struct VkVideoEncodeAV1QuantizationMapCapabilitiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeAV1QuantizationMapCapabilitiesKHR.html
struct VideoEncodeAV1QuantizationMapCapabilitiesKHR
{
using NativeType = VkVideoEncodeAV1QuantizationMapCapabilitiesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeAv1QuantizationMapCapabilitiesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoEncodeAV1QuantizationMapCapabilitiesKHR(int32_t minQIndexDelta_ = {}, int32_t maxQIndexDelta_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, minQIndexDelta{ minQIndexDelta_ }, maxQIndexDelta{ maxQIndexDelta_ }
{}
VULKAN_HPP_CONSTEXPR VideoEncodeAV1QuantizationMapCapabilitiesKHR( VideoEncodeAV1QuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeAV1QuantizationMapCapabilitiesKHR( VkVideoEncodeAV1QuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeAV1QuantizationMapCapabilitiesKHR( *reinterpret_cast<VideoEncodeAV1QuantizationMapCapabilitiesKHR const *>( &rhs ) )
{}
VideoEncodeAV1QuantizationMapCapabilitiesKHR & operator=( VideoEncodeAV1QuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoEncodeAV1QuantizationMapCapabilitiesKHR & operator=( VkVideoEncodeAV1QuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeAV1QuantizationMapCapabilitiesKHR const *>( &rhs );
return *this;
}
operator VkVideoEncodeAV1QuantizationMapCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeAV1QuantizationMapCapabilitiesKHR*>( this );
}
operator VkVideoEncodeAV1QuantizationMapCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeAV1QuantizationMapCapabilitiesKHR*>( this );
}
operator VkVideoEncodeAV1QuantizationMapCapabilitiesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoEncodeAV1QuantizationMapCapabilitiesKHR*>( this );
}
operator VkVideoEncodeAV1QuantizationMapCapabilitiesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoEncodeAV1QuantizationMapCapabilitiesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, int32_t const &, int32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, minQIndexDelta, maxQIndexDelta );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoEncodeAV1QuantizationMapCapabilitiesKHR const & ) const = default;
#else
bool operator==( VideoEncodeAV1QuantizationMapCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( minQIndexDelta == rhs.minQIndexDelta )
&& ( maxQIndexDelta == rhs.maxQIndexDelta );
#endif
}
bool operator!=( VideoEncodeAV1QuantizationMapCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeAv1QuantizationMapCapabilitiesKHR;
void * pNext = {};
int32_t minQIndexDelta = {};
int32_t maxQIndexDelta = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoEncodeAv1QuantizationMapCapabilitiesKHR>
{
using Type = VideoEncodeAV1QuantizationMapCapabilitiesKHR;
};
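// Usage sketch (illustrative, not generator output): like the other capability
// structs, this one is read back through the pNext chain of VideoCapabilitiesKHR
// when querying an AV1 encode profile that uses quantization maps; the
// structure-chain overload and `profileInfo` below are assumed.
//
//   auto caps = physicalDevice.getVideoCapabilitiesKHR<
//       VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR,
//       VULKAN_HPP_NAMESPACE::VideoEncodeAV1QuantizationMapCapabilitiesKHR>( profileInfo );
//   auto const & qmCaps = caps.get<VULKAN_HPP_NAMESPACE::VideoEncodeAV1QuantizationMapCapabilitiesKHR>();
//   // valid q_index deltas in the map lie within [qmCaps.minQIndexDelta, qmCaps.maxQIndexDelta]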
// wrapper struct for struct VkVideoEncodeAV1RateControlInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeAV1RateControlInfoKHR.html
struct VideoEncodeAV1RateControlInfoKHR
{
using NativeType = VkVideoEncodeAV1RateControlInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeAv1RateControlInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoEncodeAV1RateControlInfoKHR(VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlFlagsKHR flags_ = {}, uint32_t gopFrameCount_ = {}, uint32_t keyFramePeriod_ = {}, uint32_t consecutiveBipredictiveFrameCount_ = {}, uint32_t temporalLayerCount_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, gopFrameCount{ gopFrameCount_ }, keyFramePeriod{ keyFramePeriod_ }, consecutiveBipredictiveFrameCount{ consecutiveBipredictiveFrameCount_ }, temporalLayerCount{ temporalLayerCount_ }
{}
VULKAN_HPP_CONSTEXPR VideoEncodeAV1RateControlInfoKHR( VideoEncodeAV1RateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeAV1RateControlInfoKHR( VkVideoEncodeAV1RateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeAV1RateControlInfoKHR( *reinterpret_cast<VideoEncodeAV1RateControlInfoKHR const *>( &rhs ) )
{}
VideoEncodeAV1RateControlInfoKHR & operator=( VideoEncodeAV1RateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoEncodeAV1RateControlInfoKHR & operator=( VkVideoEncodeAV1RateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlInfoKHR & setGopFrameCount( uint32_t gopFrameCount_ ) VULKAN_HPP_NOEXCEPT
{
gopFrameCount = gopFrameCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlInfoKHR & setKeyFramePeriod( uint32_t keyFramePeriod_ ) VULKAN_HPP_NOEXCEPT
{
keyFramePeriod = keyFramePeriod_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlInfoKHR & setConsecutiveBipredictiveFrameCount( uint32_t consecutiveBipredictiveFrameCount_ ) VULKAN_HPP_NOEXCEPT
{
consecutiveBipredictiveFrameCount = consecutiveBipredictiveFrameCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlInfoKHR & setTemporalLayerCount( uint32_t temporalLayerCount_ ) VULKAN_HPP_NOEXCEPT
{
temporalLayerCount = temporalLayerCount_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoEncodeAV1RateControlInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeAV1RateControlInfoKHR*>( this );
}
operator VkVideoEncodeAV1RateControlInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeAV1RateControlInfoKHR*>( this );
}
operator VkVideoEncodeAV1RateControlInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoEncodeAV1RateControlInfoKHR*>( this );
}
operator VkVideoEncodeAV1RateControlInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoEncodeAV1RateControlInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlFlagsKHR const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, gopFrameCount, keyFramePeriod, consecutiveBipredictiveFrameCount, temporalLayerCount );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoEncodeAV1RateControlInfoKHR const & ) const = default;
#else
bool operator==( VideoEncodeAV1RateControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( gopFrameCount == rhs.gopFrameCount )
&& ( keyFramePeriod == rhs.keyFramePeriod )
&& ( consecutiveBipredictiveFrameCount == rhs.consecutiveBipredictiveFrameCount )
&& ( temporalLayerCount == rhs.temporalLayerCount );
#endif
}
bool operator!=( VideoEncodeAV1RateControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeAv1RateControlInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlFlagsKHR flags = {};
uint32_t gopFrameCount = {};
uint32_t keyFramePeriod = {};
uint32_t consecutiveBipredictiveFrameCount = {};
uint32_t temporalLayerCount = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoEncodeAv1RateControlInfoKHR>
{
using Type = VideoEncodeAV1RateControlInfoKHR;
};
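// Usage sketch (illustrative, not generator output): an application would
// normally fill this struct with the fluent setters defined above and hang it
// off the pNext chain of the codec-independent rate control info when
// (re)configuring rate control; the numeric values are arbitrary placeholders.
//
//   VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlInfoKHR av1RateControl{};
//   av1RateControl.setGopFrameCount( 16 )
//                 .setKeyFramePeriod( 16 )
//                 .setConsecutiveBipredictiveFrameCount( 0 )
//                 .setTemporalLayerCount( 1 );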
// wrapper struct for struct VkVideoEncodeAV1RateControlLayerInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeAV1RateControlLayerInfoKHR.html
struct VideoEncodeAV1RateControlLayerInfoKHR
{
using NativeType = VkVideoEncodeAV1RateControlLayerInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeAv1RateControlLayerInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoEncodeAV1RateControlLayerInfoKHR(VULKAN_HPP_NAMESPACE::Bool32 useMinQIndex_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeAV1QIndexKHR minQIndex_ = {}, VULKAN_HPP_NAMESPACE::Bool32 useMaxQIndex_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeAV1QIndexKHR maxQIndex_ = {}, VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeAV1FrameSizeKHR maxFrameSize_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, useMinQIndex{ useMinQIndex_ }, minQIndex{ minQIndex_ }, useMaxQIndex{ useMaxQIndex_ }, maxQIndex{ maxQIndex_ }, useMaxFrameSize{ useMaxFrameSize_ }, maxFrameSize{ maxFrameSize_ }
{}
VULKAN_HPP_CONSTEXPR VideoEncodeAV1RateControlLayerInfoKHR( VideoEncodeAV1RateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeAV1RateControlLayerInfoKHR( VkVideoEncodeAV1RateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeAV1RateControlLayerInfoKHR( *reinterpret_cast<VideoEncodeAV1RateControlLayerInfoKHR const *>( &rhs ) )
{}
VideoEncodeAV1RateControlLayerInfoKHR & operator=( VideoEncodeAV1RateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoEncodeAV1RateControlLayerInfoKHR & operator=( VkVideoEncodeAV1RateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlLayerInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlLayerInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlLayerInfoKHR & setUseMinQIndex( VULKAN_HPP_NAMESPACE::Bool32 useMinQIndex_ ) VULKAN_HPP_NOEXCEPT
{
useMinQIndex = useMinQIndex_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlLayerInfoKHR & setMinQIndex( VULKAN_HPP_NAMESPACE::VideoEncodeAV1QIndexKHR const & minQIndex_ ) VULKAN_HPP_NOEXCEPT
{
minQIndex = minQIndex_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlLayerInfoKHR & setUseMaxQIndex( VULKAN_HPP_NAMESPACE::Bool32 useMaxQIndex_ ) VULKAN_HPP_NOEXCEPT
{
useMaxQIndex = useMaxQIndex_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlLayerInfoKHR & setMaxQIndex( VULKAN_HPP_NAMESPACE::VideoEncodeAV1QIndexKHR const & maxQIndex_ ) VULKAN_HPP_NOEXCEPT
{
maxQIndex = maxQIndex_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlLayerInfoKHR & setUseMaxFrameSize( VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize_ ) VULKAN_HPP_NOEXCEPT
{
useMaxFrameSize = useMaxFrameSize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlLayerInfoKHR & setMaxFrameSize( VULKAN_HPP_NAMESPACE::VideoEncodeAV1FrameSizeKHR const & maxFrameSize_ ) VULKAN_HPP_NOEXCEPT
{
maxFrameSize = maxFrameSize_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoEncodeAV1RateControlLayerInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeAV1RateControlLayerInfoKHR*>( this );
}
operator VkVideoEncodeAV1RateControlLayerInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeAV1RateControlLayerInfoKHR*>( this );
}
operator VkVideoEncodeAV1RateControlLayerInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoEncodeAV1RateControlLayerInfoKHR*>( this );
}
operator VkVideoEncodeAV1RateControlLayerInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoEncodeAV1RateControlLayerInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::VideoEncodeAV1QIndexKHR const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::VideoEncodeAV1QIndexKHR const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::VideoEncodeAV1FrameSizeKHR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, useMinQIndex, minQIndex, useMaxQIndex, maxQIndex, useMaxFrameSize, maxFrameSize );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoEncodeAV1RateControlLayerInfoKHR const & ) const = default;
#else
bool operator==( VideoEncodeAV1RateControlLayerInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( useMinQIndex == rhs.useMinQIndex )
&& ( minQIndex == rhs.minQIndex )
&& ( useMaxQIndex == rhs.useMaxQIndex )
&& ( maxQIndex == rhs.maxQIndex )
&& ( useMaxFrameSize == rhs.useMaxFrameSize )
&& ( maxFrameSize == rhs.maxFrameSize );
#endif
}
bool operator!=( VideoEncodeAV1RateControlLayerInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeAv1RateControlLayerInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 useMinQIndex = {};
VULKAN_HPP_NAMESPACE::VideoEncodeAV1QIndexKHR minQIndex = {};
VULKAN_HPP_NAMESPACE::Bool32 useMaxQIndex = {};
VULKAN_HPP_NAMESPACE::VideoEncodeAV1QIndexKHR maxQIndex = {};
VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize = {};
VULKAN_HPP_NAMESPACE::VideoEncodeAV1FrameSizeKHR maxFrameSize = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoEncodeAv1RateControlLayerInfoKHR>
{
using Type = VideoEncodeAV1RateControlLayerInfoKHR;
};
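// Usage sketch (illustrative, not generator output): per-layer AV1 bounds are
// expressed with use*/value pairs; a bound only takes effect when the matching
// Bool32 is VK_TRUE. This assumes VideoEncodeAV1QIndexKHR carries the
// intra / predictive / bipredictive q_index members in that order.
//
//   VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlLayerInfoKHR av1Layer{};
//   av1Layer.setUseMinQIndex( VK_TRUE )
//           .setMinQIndex( { 32, 32, 32 } )
//           .setUseMaxQIndex( VK_TRUE )
//           .setMaxQIndex( { 160, 160, 160 } );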
// wrapper struct for struct VkVideoEncodeAV1SessionCreateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeAV1SessionCreateInfoKHR.html
struct VideoEncodeAV1SessionCreateInfoKHR
{
using NativeType = VkVideoEncodeAV1SessionCreateInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeAv1SessionCreateInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoEncodeAV1SessionCreateInfoKHR(VULKAN_HPP_NAMESPACE::Bool32 useMaxLevel_ = {}, StdVideoAV1Level maxLevel_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, useMaxLevel{ useMaxLevel_ }, maxLevel{ maxLevel_ }
{}
VULKAN_HPP_CONSTEXPR VideoEncodeAV1SessionCreateInfoKHR( VideoEncodeAV1SessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeAV1SessionCreateInfoKHR( VkVideoEncodeAV1SessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeAV1SessionCreateInfoKHR( *reinterpret_cast<VideoEncodeAV1SessionCreateInfoKHR const *>( &rhs ) )
{}
VideoEncodeAV1SessionCreateInfoKHR & operator=( VideoEncodeAV1SessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoEncodeAV1SessionCreateInfoKHR & operator=( VkVideoEncodeAV1SessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeAV1SessionCreateInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1SessionCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1SessionCreateInfoKHR & setUseMaxLevel( VULKAN_HPP_NAMESPACE::Bool32 useMaxLevel_ ) VULKAN_HPP_NOEXCEPT
{
useMaxLevel = useMaxLevel_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1SessionCreateInfoKHR & setMaxLevel( StdVideoAV1Level maxLevel_ ) VULKAN_HPP_NOEXCEPT
{
maxLevel = maxLevel_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoEncodeAV1SessionCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeAV1SessionCreateInfoKHR*>( this );
}
operator VkVideoEncodeAV1SessionCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeAV1SessionCreateInfoKHR*>( this );
}
operator VkVideoEncodeAV1SessionCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoEncodeAV1SessionCreateInfoKHR*>( this );
}
operator VkVideoEncodeAV1SessionCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoEncodeAV1SessionCreateInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, StdVideoAV1Level const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, useMaxLevel, maxLevel );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
std::strong_ordering operator<=>( VideoEncodeAV1SessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
if ( auto cmp = useMaxLevel <=> rhs.useMaxLevel; cmp != 0 ) return cmp;
if ( auto cmp = memcmp( &maxLevel, &rhs.maxLevel, sizeof( StdVideoAV1Level ) ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
return std::strong_ordering::equivalent;
}
#endif
bool operator==( VideoEncodeAV1SessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( useMaxLevel == rhs.useMaxLevel )
&& ( memcmp( &maxLevel, &rhs.maxLevel, sizeof( StdVideoAV1Level ) ) == 0 );
}
bool operator!=( VideoEncodeAV1SessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeAv1SessionCreateInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 useMaxLevel = {};
StdVideoAV1Level maxLevel = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoEncodeAv1SessionCreateInfoKHR>
{
using Type = VideoEncodeAV1SessionCreateInfoKHR;
};
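// Usage sketch (illustrative, not generator output): chained behind
// VideoSessionCreateInfoKHR to optionally cap the AV1 level of the created
// encode session; `sessionCreateInfo` is an assumed application-side
// VideoSessionCreateInfoKHR instance.
//
//   VULKAN_HPP_NAMESPACE::VideoEncodeAV1SessionCreateInfoKHR av1SessionInfo{};
//   av1SessionInfo.setUseMaxLevel( VK_TRUE ).setMaxLevel( STD_VIDEO_AV1_LEVEL_5_1 );
//   sessionCreateInfo.setPNext( &av1SessionInfo );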
// wrapper struct for struct VkVideoEncodeAV1SessionParametersCreateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeAV1SessionParametersCreateInfoKHR.html
struct VideoEncodeAV1SessionParametersCreateInfoKHR
{
using NativeType = VkVideoEncodeAV1SessionParametersCreateInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeAv1SessionParametersCreateInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoEncodeAV1SessionParametersCreateInfoKHR(const StdVideoAV1SequenceHeader * pStdSequenceHeader_ = {}, const StdVideoEncodeAV1DecoderModelInfo * pStdDecoderModelInfo_ = {}, uint32_t stdOperatingPointCount_ = {}, const StdVideoEncodeAV1OperatingPointInfo * pStdOperatingPoints_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pStdSequenceHeader{ pStdSequenceHeader_ }, pStdDecoderModelInfo{ pStdDecoderModelInfo_ }, stdOperatingPointCount{ stdOperatingPointCount_ }, pStdOperatingPoints{ pStdOperatingPoints_ }
{}
VULKAN_HPP_CONSTEXPR VideoEncodeAV1SessionParametersCreateInfoKHR( VideoEncodeAV1SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeAV1SessionParametersCreateInfoKHR( VkVideoEncodeAV1SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeAV1SessionParametersCreateInfoKHR( *reinterpret_cast<VideoEncodeAV1SessionParametersCreateInfoKHR const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
VideoEncodeAV1SessionParametersCreateInfoKHR( const StdVideoAV1SequenceHeader * pStdSequenceHeader_, const StdVideoEncodeAV1DecoderModelInfo * pStdDecoderModelInfo_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoEncodeAV1OperatingPointInfo> const & stdOperatingPoints_, const void * pNext_ = nullptr )
: pNext( pNext_ ), pStdSequenceHeader( pStdSequenceHeader_ ), pStdDecoderModelInfo( pStdDecoderModelInfo_ ), stdOperatingPointCount( static_cast<uint32_t>( stdOperatingPoints_.size() ) ), pStdOperatingPoints( stdOperatingPoints_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VideoEncodeAV1SessionParametersCreateInfoKHR & operator=( VideoEncodeAV1SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoEncodeAV1SessionParametersCreateInfoKHR & operator=( VkVideoEncodeAV1SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeAV1SessionParametersCreateInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1SessionParametersCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1SessionParametersCreateInfoKHR & setPStdSequenceHeader( const StdVideoAV1SequenceHeader * pStdSequenceHeader_ ) VULKAN_HPP_NOEXCEPT
{
pStdSequenceHeader = pStdSequenceHeader_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1SessionParametersCreateInfoKHR & setPStdDecoderModelInfo( const StdVideoEncodeAV1DecoderModelInfo * pStdDecoderModelInfo_ ) VULKAN_HPP_NOEXCEPT
{
pStdDecoderModelInfo = pStdDecoderModelInfo_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1SessionParametersCreateInfoKHR & setStdOperatingPointCount( uint32_t stdOperatingPointCount_ ) VULKAN_HPP_NOEXCEPT
{
stdOperatingPointCount = stdOperatingPointCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1SessionParametersCreateInfoKHR & setPStdOperatingPoints( const StdVideoEncodeAV1OperatingPointInfo * pStdOperatingPoints_ ) VULKAN_HPP_NOEXCEPT
{
pStdOperatingPoints = pStdOperatingPoints_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
VideoEncodeAV1SessionParametersCreateInfoKHR & setStdOperatingPoints( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoEncodeAV1OperatingPointInfo> const & stdOperatingPoints_ ) VULKAN_HPP_NOEXCEPT
{
stdOperatingPointCount = static_cast<uint32_t>( stdOperatingPoints_.size() );
pStdOperatingPoints = stdOperatingPoints_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoEncodeAV1SessionParametersCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeAV1SessionParametersCreateInfoKHR*>( this );
}
operator VkVideoEncodeAV1SessionParametersCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeAV1SessionParametersCreateInfoKHR*>( this );
}
operator VkVideoEncodeAV1SessionParametersCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoEncodeAV1SessionParametersCreateInfoKHR*>( this );
}
operator VkVideoEncodeAV1SessionParametersCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoEncodeAV1SessionParametersCreateInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const StdVideoAV1SequenceHeader * const &, const StdVideoEncodeAV1DecoderModelInfo * const &, uint32_t const &, const StdVideoEncodeAV1OperatingPointInfo * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pStdSequenceHeader, pStdDecoderModelInfo, stdOperatingPointCount, pStdOperatingPoints );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoEncodeAV1SessionParametersCreateInfoKHR const & ) const = default;
#else
bool operator==( VideoEncodeAV1SessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pStdSequenceHeader == rhs.pStdSequenceHeader )
&& ( pStdDecoderModelInfo == rhs.pStdDecoderModelInfo )
&& ( stdOperatingPointCount == rhs.stdOperatingPointCount )
&& ( pStdOperatingPoints == rhs.pStdOperatingPoints );
#endif
}
bool operator!=( VideoEncodeAV1SessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeAv1SessionParametersCreateInfoKHR;
const void * pNext = {};
const StdVideoAV1SequenceHeader * pStdSequenceHeader = {};
const StdVideoEncodeAV1DecoderModelInfo * pStdDecoderModelInfo = {};
uint32_t stdOperatingPointCount = {};
const StdVideoEncodeAV1OperatingPointInfo * pStdOperatingPoints = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoEncodeAv1SessionParametersCreateInfoKHR>
{
using Type = VideoEncodeAV1SessionParametersCreateInfoKHR;
};
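// Usage sketch (illustrative, not generator output): the enhanced-mode
// constructor above derives stdOperatingPointCount from the array proxy, so the
// count cannot drift from the data pointer; `seqHeader` and `operatingPoints`
// are assumed application-side objects.
//
//   std::vector<StdVideoEncodeAV1OperatingPointInfo> operatingPoints = /* ... */;
//   VULKAN_HPP_NAMESPACE::VideoEncodeAV1SessionParametersCreateInfoKHR av1Params(
//       &seqHeader, nullptr, operatingPoints );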
// wrapper struct for struct VkVideoEncodeCapabilitiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeCapabilitiesKHR.html
struct VideoEncodeCapabilitiesKHR
{
using NativeType = VkVideoEncodeCapabilitiesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeCapabilitiesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoEncodeCapabilitiesKHR(VULKAN_HPP_NAMESPACE::VideoEncodeCapabilityFlagsKHR flags_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagsKHR rateControlModes_ = {}, uint32_t maxRateControlLayers_ = {}, uint64_t maxBitrate_ = {}, uint32_t maxQualityLevels_ = {}, VULKAN_HPP_NAMESPACE::Extent2D encodeInputPictureGranularity_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeFeedbackFlagsKHR supportedEncodeFeedbackFlags_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, rateControlModes{ rateControlModes_ }, maxRateControlLayers{ maxRateControlLayers_ }, maxBitrate{ maxBitrate_ }, maxQualityLevels{ maxQualityLevels_ }, encodeInputPictureGranularity{ encodeInputPictureGranularity_ }, supportedEncodeFeedbackFlags{ supportedEncodeFeedbackFlags_ }
{}
VULKAN_HPP_CONSTEXPR VideoEncodeCapabilitiesKHR( VideoEncodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeCapabilitiesKHR( VkVideoEncodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeCapabilitiesKHR( *reinterpret_cast<VideoEncodeCapabilitiesKHR const *>( &rhs ) )
{}
VideoEncodeCapabilitiesKHR & operator=( VideoEncodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoEncodeCapabilitiesKHR & operator=( VkVideoEncodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeCapabilitiesKHR const *>( &rhs );
return *this;
}
operator VkVideoEncodeCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeCapabilitiesKHR*>( this );
}
operator VkVideoEncodeCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeCapabilitiesKHR*>( this );
}
operator VkVideoEncodeCapabilitiesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoEncodeCapabilitiesKHR*>( this );
}
operator VkVideoEncodeCapabilitiesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoEncodeCapabilitiesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::VideoEncodeCapabilityFlagsKHR const &, VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagsKHR const &, uint32_t const &, uint64_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::VideoEncodeFeedbackFlagsKHR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, rateControlModes, maxRateControlLayers, maxBitrate, maxQualityLevels, encodeInputPictureGranularity, supportedEncodeFeedbackFlags );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoEncodeCapabilitiesKHR const & ) const = default;
#else
bool operator==( VideoEncodeCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( rateControlModes == rhs.rateControlModes )
&& ( maxRateControlLayers == rhs.maxRateControlLayers )
&& ( maxBitrate == rhs.maxBitrate )
&& ( maxQualityLevels == rhs.maxQualityLevels )
&& ( encodeInputPictureGranularity == rhs.encodeInputPictureGranularity )
&& ( supportedEncodeFeedbackFlags == rhs.supportedEncodeFeedbackFlags );
#endif
}
bool operator!=( VideoEncodeCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeCapabilitiesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::VideoEncodeCapabilityFlagsKHR flags = {};
VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagsKHR rateControlModes = {};
uint32_t maxRateControlLayers = {};
uint64_t maxBitrate = {};
uint32_t maxQualityLevels = {};
VULKAN_HPP_NAMESPACE::Extent2D encodeInputPictureGranularity = {};
VULKAN_HPP_NAMESPACE::VideoEncodeFeedbackFlagsKHR supportedEncodeFeedbackFlags = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoEncodeCapabilitiesKHR>
{
using Type = VideoEncodeCapabilitiesKHR;
};
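// Usage sketch (illustrative, not generator output): encode capabilities are
// returned by chaining this struct behind VideoCapabilitiesKHR, assuming the
// enhanced-mode structure-chain overload of getVideoCapabilitiesKHR.
//
//   auto chain = physicalDevice.getVideoCapabilitiesKHR<
//       VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR,
//       VULKAN_HPP_NAMESPACE::VideoEncodeCapabilitiesKHR>( profileInfo );
//   auto const & encodeCaps = chain.get<VULKAN_HPP_NAMESPACE::VideoEncodeCapabilitiesKHR>();
//   bool vbr = static_cast<bool>( encodeCaps.rateControlModes &
//       VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR::eVbr );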
// wrapper struct for struct VkVideoEncodeH264CapabilitiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH264CapabilitiesKHR.html
struct VideoEncodeH264CapabilitiesKHR
{
using NativeType = VkVideoEncodeH264CapabilitiesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264CapabilitiesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoEncodeH264CapabilitiesKHR(VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilityFlagsKHR flags_ = {}, StdVideoH264LevelIdc maxLevelIdc_ = {}, uint32_t maxSliceCount_ = {}, uint32_t maxPPictureL0ReferenceCount_ = {}, uint32_t maxBPictureL0ReferenceCount_ = {}, uint32_t maxL1ReferenceCount_ = {}, uint32_t maxTemporalLayerCount_ = {}, VULKAN_HPP_NAMESPACE::Bool32 expectDyadicTemporalLayerPattern_ = {}, int32_t minQp_ = {}, int32_t maxQp_ = {}, VULKAN_HPP_NAMESPACE::Bool32 prefersGopRemainingFrames_ = {}, VULKAN_HPP_NAMESPACE::Bool32 requiresGopRemainingFrames_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH264StdFlagsKHR stdSyntaxFlags_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, maxLevelIdc{ maxLevelIdc_ }, maxSliceCount{ maxSliceCount_ }, maxPPictureL0ReferenceCount{ maxPPictureL0ReferenceCount_ }, maxBPictureL0ReferenceCount{ maxBPictureL0ReferenceCount_ }, maxL1ReferenceCount{ maxL1ReferenceCount_ }, maxTemporalLayerCount{ maxTemporalLayerCount_ }, expectDyadicTemporalLayerPattern{ expectDyadicTemporalLayerPattern_ }, minQp{ minQp_ }, maxQp{ maxQp_ }, prefersGopRemainingFrames{ prefersGopRemainingFrames_ }, requiresGopRemainingFrames{ requiresGopRemainingFrames_ }, stdSyntaxFlags{ stdSyntaxFlags_ }
{}
VULKAN_HPP_CONSTEXPR VideoEncodeH264CapabilitiesKHR( VideoEncodeH264CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeH264CapabilitiesKHR( VkVideoEncodeH264CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeH264CapabilitiesKHR( *reinterpret_cast<VideoEncodeH264CapabilitiesKHR const *>( &rhs ) )
{}
VideoEncodeH264CapabilitiesKHR & operator=( VideoEncodeH264CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoEncodeH264CapabilitiesKHR & operator=( VkVideoEncodeH264CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilitiesKHR const *>( &rhs );
return *this;
}
operator VkVideoEncodeH264CapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeH264CapabilitiesKHR*>( this );
}
operator VkVideoEncodeH264CapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeH264CapabilitiesKHR*>( this );
}
operator VkVideoEncodeH264CapabilitiesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoEncodeH264CapabilitiesKHR*>( this );
}
operator VkVideoEncodeH264CapabilitiesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoEncodeH264CapabilitiesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilityFlagsKHR const &, StdVideoH264LevelIdc const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &, int32_t const &, int32_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::VideoEncodeH264StdFlagsKHR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, maxLevelIdc, maxSliceCount, maxPPictureL0ReferenceCount, maxBPictureL0ReferenceCount, maxL1ReferenceCount, maxTemporalLayerCount, expectDyadicTemporalLayerPattern, minQp, maxQp, prefersGopRemainingFrames, requiresGopRemainingFrames, stdSyntaxFlags );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
std::strong_ordering operator<=>( VideoEncodeH264CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) return cmp;
if ( auto cmp = memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH264LevelIdc ) ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
if ( auto cmp = maxSliceCount <=> rhs.maxSliceCount; cmp != 0 ) return cmp;
if ( auto cmp = maxPPictureL0ReferenceCount <=> rhs.maxPPictureL0ReferenceCount; cmp != 0 ) return cmp;
if ( auto cmp = maxBPictureL0ReferenceCount <=> rhs.maxBPictureL0ReferenceCount; cmp != 0 ) return cmp;
if ( auto cmp = maxL1ReferenceCount <=> rhs.maxL1ReferenceCount; cmp != 0 ) return cmp;
if ( auto cmp = maxTemporalLayerCount <=> rhs.maxTemporalLayerCount; cmp != 0 ) return cmp;
if ( auto cmp = expectDyadicTemporalLayerPattern <=> rhs.expectDyadicTemporalLayerPattern; cmp != 0 ) return cmp;
if ( auto cmp = minQp <=> rhs.minQp; cmp != 0 ) return cmp;
if ( auto cmp = maxQp <=> rhs.maxQp; cmp != 0 ) return cmp;
if ( auto cmp = prefersGopRemainingFrames <=> rhs.prefersGopRemainingFrames; cmp != 0 ) return cmp;
if ( auto cmp = requiresGopRemainingFrames <=> rhs.requiresGopRemainingFrames; cmp != 0 ) return cmp;
if ( auto cmp = stdSyntaxFlags <=> rhs.stdSyntaxFlags; cmp != 0 ) return cmp;
return std::strong_ordering::equivalent;
}
#endif
bool operator==( VideoEncodeH264CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH264LevelIdc ) ) == 0 )
&& ( maxSliceCount == rhs.maxSliceCount )
&& ( maxPPictureL0ReferenceCount == rhs.maxPPictureL0ReferenceCount )
&& ( maxBPictureL0ReferenceCount == rhs.maxBPictureL0ReferenceCount )
&& ( maxL1ReferenceCount == rhs.maxL1ReferenceCount )
&& ( maxTemporalLayerCount == rhs.maxTemporalLayerCount )
&& ( expectDyadicTemporalLayerPattern == rhs.expectDyadicTemporalLayerPattern )
&& ( minQp == rhs.minQp )
&& ( maxQp == rhs.maxQp )
&& ( prefersGopRemainingFrames == rhs.prefersGopRemainingFrames )
&& ( requiresGopRemainingFrames == rhs.requiresGopRemainingFrames )
&& ( stdSyntaxFlags == rhs.stdSyntaxFlags );
}
bool operator!=( VideoEncodeH264CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264CapabilitiesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilityFlagsKHR flags = {};
StdVideoH264LevelIdc maxLevelIdc = {};
uint32_t maxSliceCount = {};
uint32_t maxPPictureL0ReferenceCount = {};
uint32_t maxBPictureL0ReferenceCount = {};
uint32_t maxL1ReferenceCount = {};
uint32_t maxTemporalLayerCount = {};
VULKAN_HPP_NAMESPACE::Bool32 expectDyadicTemporalLayerPattern = {};
int32_t minQp = {};
int32_t maxQp = {};
VULKAN_HPP_NAMESPACE::Bool32 prefersGopRemainingFrames = {};
VULKAN_HPP_NAMESPACE::Bool32 requiresGopRemainingFrames = {};
VULKAN_HPP_NAMESPACE::VideoEncodeH264StdFlagsKHR stdSyntaxFlags = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoEncodeH264CapabilitiesKHR>
{
using Type = VideoEncodeH264CapabilitiesKHR;
};
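// Usage sketch (illustrative, not generator output): H.264 encode capabilities
// travel in the same VideoCapabilitiesKHR pNext chain; a typical consumer clamps
// its requested QP to the reported range (std::clamp needs <algorithm>).
//
//   auto const & h264Caps = chain.get<VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilitiesKHR>();
//   int32_t qp = std::clamp( requestedQp, h264Caps.minQp, h264Caps.maxQp );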
// wrapper struct for struct VkVideoEncodeH264DpbSlotInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH264DpbSlotInfoKHR.html
struct VideoEncodeH264DpbSlotInfoKHR
{
using NativeType = VkVideoEncodeH264DpbSlotInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264DpbSlotInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoEncodeH264DpbSlotInfoKHR(const StdVideoEncodeH264ReferenceInfo * pStdReferenceInfo_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pStdReferenceInfo{ pStdReferenceInfo_ }
{}
VULKAN_HPP_CONSTEXPR VideoEncodeH264DpbSlotInfoKHR( VideoEncodeH264DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeH264DpbSlotInfoKHR( VkVideoEncodeH264DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeH264DpbSlotInfoKHR( *reinterpret_cast<VideoEncodeH264DpbSlotInfoKHR const *>( &rhs ) )
{}
VideoEncodeH264DpbSlotInfoKHR & operator=( VideoEncodeH264DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoEncodeH264DpbSlotInfoKHR & operator=( VkVideoEncodeH264DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264DpbSlotInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264DpbSlotInfoKHR & setPStdReferenceInfo( const StdVideoEncodeH264ReferenceInfo * pStdReferenceInfo_ ) VULKAN_HPP_NOEXCEPT
{
pStdReferenceInfo = pStdReferenceInfo_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoEncodeH264DpbSlotInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeH264DpbSlotInfoKHR*>( this );
}
operator VkVideoEncodeH264DpbSlotInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeH264DpbSlotInfoKHR*>( this );
}
operator VkVideoEncodeH264DpbSlotInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoEncodeH264DpbSlotInfoKHR*>( this );
}
operator VkVideoEncodeH264DpbSlotInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoEncodeH264DpbSlotInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const StdVideoEncodeH264ReferenceInfo * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pStdReferenceInfo );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoEncodeH264DpbSlotInfoKHR const & ) const = default;
#else
bool operator==( VideoEncodeH264DpbSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pStdReferenceInfo == rhs.pStdReferenceInfo );
#endif
}
bool operator!=( VideoEncodeH264DpbSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264DpbSlotInfoKHR;
const void * pNext = {};
const StdVideoEncodeH264ReferenceInfo * pStdReferenceInfo = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoEncodeH264DpbSlotInfoKHR>
{
using Type = VideoEncodeH264DpbSlotInfoKHR;
};
// wrapper struct for struct VkVideoEncodeH264FrameSizeKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH264FrameSizeKHR.html
struct VideoEncodeH264FrameSizeKHR
{
using NativeType = VkVideoEncodeH264FrameSizeKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoEncodeH264FrameSizeKHR(uint32_t frameISize_ = {}, uint32_t framePSize_ = {}, uint32_t frameBSize_ = {}) VULKAN_HPP_NOEXCEPT
: frameISize{ frameISize_ }, framePSize{ framePSize_ }, frameBSize{ frameBSize_ }
{}
VULKAN_HPP_CONSTEXPR VideoEncodeH264FrameSizeKHR( VideoEncodeH264FrameSizeKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeH264FrameSizeKHR( VkVideoEncodeH264FrameSizeKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeH264FrameSizeKHR( *reinterpret_cast<VideoEncodeH264FrameSizeKHR const *>( &rhs ) )
{}
VideoEncodeH264FrameSizeKHR & operator=( VideoEncodeH264FrameSizeKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoEncodeH264FrameSizeKHR & operator=( VkVideoEncodeH264FrameSizeKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264FrameSizeKHR & setFrameISize( uint32_t frameISize_ ) VULKAN_HPP_NOEXCEPT
{
frameISize = frameISize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264FrameSizeKHR & setFramePSize( uint32_t framePSize_ ) VULKAN_HPP_NOEXCEPT
{
framePSize = framePSize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264FrameSizeKHR & setFrameBSize( uint32_t frameBSize_ ) VULKAN_HPP_NOEXCEPT
{
frameBSize = frameBSize_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoEncodeH264FrameSizeKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeH264FrameSizeKHR*>( this );
}
operator VkVideoEncodeH264FrameSizeKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeH264FrameSizeKHR*>( this );
}
operator VkVideoEncodeH264FrameSizeKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoEncodeH264FrameSizeKHR*>( this );
}
operator VkVideoEncodeH264FrameSizeKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoEncodeH264FrameSizeKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<uint32_t const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( frameISize, framePSize, frameBSize );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoEncodeH264FrameSizeKHR const & ) const = default;
#else
bool operator==( VideoEncodeH264FrameSizeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( frameISize == rhs.frameISize )
&& ( framePSize == rhs.framePSize )
&& ( frameBSize == rhs.frameBSize );
#endif
}
bool operator!=( VideoEncodeH264FrameSizeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
uint32_t frameISize = {};
uint32_t framePSize = {};
uint32_t frameBSize = {};
};
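// Usage sketch (illustrative, not generator output): this plain struct (no
// sType/pNext) is embedded as the maxFrameSize member of the H.264 rate control
// layer info to cap the encoded size per picture type, in bytes.
//
//   VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeKHR maxSizes{};
//   maxSizes.setFrameISize( 256 * 1024 )
//           .setFramePSize( 128 * 1024 )
//           .setFrameBSize( 64 * 1024 );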
// wrapper struct for struct VkVideoEncodeH264GopRemainingFrameInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH264GopRemainingFrameInfoKHR.html
struct VideoEncodeH264GopRemainingFrameInfoKHR
{
using NativeType = VkVideoEncodeH264GopRemainingFrameInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264GopRemainingFrameInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoEncodeH264GopRemainingFrameInfoKHR(VULKAN_HPP_NAMESPACE::Bool32 useGopRemainingFrames_ = {}, uint32_t gopRemainingI_ = {}, uint32_t gopRemainingP_ = {}, uint32_t gopRemainingB_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, useGopRemainingFrames{ useGopRemainingFrames_ }, gopRemainingI{ gopRemainingI_ }, gopRemainingP{ gopRemainingP_ }, gopRemainingB{ gopRemainingB_ }
{}
VULKAN_HPP_CONSTEXPR VideoEncodeH264GopRemainingFrameInfoKHR( VideoEncodeH264GopRemainingFrameInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeH264GopRemainingFrameInfoKHR( VkVideoEncodeH264GopRemainingFrameInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeH264GopRemainingFrameInfoKHR( *reinterpret_cast<VideoEncodeH264GopRemainingFrameInfoKHR const *>( &rhs ) )
{}
VideoEncodeH264GopRemainingFrameInfoKHR & operator=( VideoEncodeH264GopRemainingFrameInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoEncodeH264GopRemainingFrameInfoKHR & operator=( VkVideoEncodeH264GopRemainingFrameInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264GopRemainingFrameInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264GopRemainingFrameInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264GopRemainingFrameInfoKHR & setUseGopRemainingFrames( VULKAN_HPP_NAMESPACE::Bool32 useGopRemainingFrames_ ) VULKAN_HPP_NOEXCEPT
{
useGopRemainingFrames = useGopRemainingFrames_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264GopRemainingFrameInfoKHR & setGopRemainingI( uint32_t gopRemainingI_ ) VULKAN_HPP_NOEXCEPT
{
gopRemainingI = gopRemainingI_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264GopRemainingFrameInfoKHR & setGopRemainingP( uint32_t gopRemainingP_ ) VULKAN_HPP_NOEXCEPT
{
gopRemainingP = gopRemainingP_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264GopRemainingFrameInfoKHR & setGopRemainingB( uint32_t gopRemainingB_ ) VULKAN_HPP_NOEXCEPT
{
gopRemainingB = gopRemainingB_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoEncodeH264GopRemainingFrameInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeH264GopRemainingFrameInfoKHR*>( this );
}
operator VkVideoEncodeH264GopRemainingFrameInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeH264GopRemainingFrameInfoKHR*>( this );
}
operator VkVideoEncodeH264GopRemainingFrameInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoEncodeH264GopRemainingFrameInfoKHR*>( this );
}
operator VkVideoEncodeH264GopRemainingFrameInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoEncodeH264GopRemainingFrameInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint32_t const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, useGopRemainingFrames, gopRemainingI, gopRemainingP, gopRemainingB );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoEncodeH264GopRemainingFrameInfoKHR const & ) const = default;
#else
bool operator==( VideoEncodeH264GopRemainingFrameInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( useGopRemainingFrames == rhs.useGopRemainingFrames )
&& ( gopRemainingI == rhs.gopRemainingI )
&& ( gopRemainingP == rhs.gopRemainingP )
&& ( gopRemainingB == rhs.gopRemainingB );
#endif
}
bool operator!=( VideoEncodeH264GopRemainingFrameInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264GopRemainingFrameInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 useGopRemainingFrames = {};
uint32_t gopRemainingI = {};
uint32_t gopRemainingP = {};
uint32_t gopRemainingB = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoEncodeH264GopRemainingFrameInfoKHR>
{
using Type = VideoEncodeH264GopRemainingFrameInfoKHR;
};
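// Usage sketch (illustrative, not generator output): when the implementation
// reports requiresGopRemainingFrames in VideoEncodeH264CapabilitiesKHR, the
// application chains this struct into the begin-coding info and keeps the
// remaining I/P/B counts current across encode operations.
//
//   VULKAN_HPP_NAMESPACE::VideoEncodeH264GopRemainingFrameInfoKHR gopRemaining{};
//   gopRemaining.setUseGopRemainingFrames( VK_TRUE )
//               .setGopRemainingI( 1 )
//               .setGopRemainingP( 10 )
//               .setGopRemainingB( 5 );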
// wrapper struct for struct VkVideoEncodeH264NaluSliceInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH264NaluSliceInfoKHR.html
struct VideoEncodeH264NaluSliceInfoKHR
{
using NativeType = VkVideoEncodeH264NaluSliceInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264NaluSliceInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoEncodeH264NaluSliceInfoKHR(int32_t constantQp_ = {}, const StdVideoEncodeH264SliceHeader * pStdSliceHeader_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, constantQp{ constantQp_ }, pStdSliceHeader{ pStdSliceHeader_ }
{}
VULKAN_HPP_CONSTEXPR VideoEncodeH264NaluSliceInfoKHR( VideoEncodeH264NaluSliceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeH264NaluSliceInfoKHR( VkVideoEncodeH264NaluSliceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeH264NaluSliceInfoKHR( *reinterpret_cast<VideoEncodeH264NaluSliceInfoKHR const *>( &rhs ) )
{}
VideoEncodeH264NaluSliceInfoKHR & operator=( VideoEncodeH264NaluSliceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoEncodeH264NaluSliceInfoKHR & operator=( VkVideoEncodeH264NaluSliceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264NaluSliceInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264NaluSliceInfoKHR & setConstantQp( int32_t constantQp_ ) VULKAN_HPP_NOEXCEPT
{
constantQp = constantQp_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264NaluSliceInfoKHR & setPStdSliceHeader( const StdVideoEncodeH264SliceHeader * pStdSliceHeader_ ) VULKAN_HPP_NOEXCEPT
{
pStdSliceHeader = pStdSliceHeader_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoEncodeH264NaluSliceInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeH264NaluSliceInfoKHR*>( this );
}
operator VkVideoEncodeH264NaluSliceInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeH264NaluSliceInfoKHR*>( this );
}
operator VkVideoEncodeH264NaluSliceInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoEncodeH264NaluSliceInfoKHR*>( this );
}
operator VkVideoEncodeH264NaluSliceInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoEncodeH264NaluSliceInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, int32_t const &, const StdVideoEncodeH264SliceHeader * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, constantQp, pStdSliceHeader );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoEncodeH264NaluSliceInfoKHR const & ) const = default;
#else
bool operator==( VideoEncodeH264NaluSliceInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( constantQp == rhs.constantQp )
&& ( pStdSliceHeader == rhs.pStdSliceHeader );
#endif
}
bool operator!=( VideoEncodeH264NaluSliceInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264NaluSliceInfoKHR;
const void * pNext = {};
int32_t constantQp = {};
const StdVideoEncodeH264SliceHeader * pStdSliceHeader = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoEncodeH264NaluSliceInfoKHR>
{
using Type = VideoEncodeH264NaluSliceInfoKHR;
};
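// Usage sketch (editorial comment, not part of the generated header): one slice
// entry pairs an optional constant QP with a pointer to a codec-standard slice
// header. "stdSliceHeader" is a hypothetical, separately filled struct; assumes
// the default "vk" namespace.
//
//   StdVideoEncodeH264SliceHeader stdSliceHeader = {};  // filled elsewhere
//   vk::VideoEncodeH264NaluSliceInfoKHR sliceInfo =
//     vk::VideoEncodeH264NaluSliceInfoKHR{}
//       .setConstantQp( 26 )
//       .setPStdSliceHeader( &stdSliceHeader );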
// wrapper struct for struct VkVideoEncodeH264PictureInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH264PictureInfoKHR.html
struct VideoEncodeH264PictureInfoKHR
{
using NativeType = VkVideoEncodeH264PictureInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264PictureInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoEncodeH264PictureInfoKHR(uint32_t naluSliceEntryCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceInfoKHR * pNaluSliceEntries_ = {}, const StdVideoEncodeH264PictureInfo * pStdPictureInfo_ = {}, VULKAN_HPP_NAMESPACE::Bool32 generatePrefixNalu_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, naluSliceEntryCount{ naluSliceEntryCount_ }, pNaluSliceEntries{ pNaluSliceEntries_ }, pStdPictureInfo{ pStdPictureInfo_ }, generatePrefixNalu{ generatePrefixNalu_ }
{}
VULKAN_HPP_CONSTEXPR VideoEncodeH264PictureInfoKHR( VideoEncodeH264PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeH264PictureInfoKHR( VkVideoEncodeH264PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeH264PictureInfoKHR( *reinterpret_cast<VideoEncodeH264PictureInfoKHR const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
VideoEncodeH264PictureInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceInfoKHR> const & naluSliceEntries_, const StdVideoEncodeH264PictureInfo * pStdPictureInfo_ = {}, VULKAN_HPP_NAMESPACE::Bool32 generatePrefixNalu_ = {}, const void * pNext_ = nullptr )
: pNext( pNext_ ), naluSliceEntryCount( static_cast<uint32_t>( naluSliceEntries_.size() ) ), pNaluSliceEntries( naluSliceEntries_.data() ), pStdPictureInfo( pStdPictureInfo_ ), generatePrefixNalu( generatePrefixNalu_ )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VideoEncodeH264PictureInfoKHR & operator=( VideoEncodeH264PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoEncodeH264PictureInfoKHR & operator=( VkVideoEncodeH264PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264PictureInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264PictureInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264PictureInfoKHR & setNaluSliceEntryCount( uint32_t naluSliceEntryCount_ ) VULKAN_HPP_NOEXCEPT
{
naluSliceEntryCount = naluSliceEntryCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264PictureInfoKHR & setPNaluSliceEntries( const VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceInfoKHR * pNaluSliceEntries_ ) VULKAN_HPP_NOEXCEPT
{
pNaluSliceEntries = pNaluSliceEntries_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
VideoEncodeH264PictureInfoKHR & setNaluSliceEntries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceInfoKHR> const & naluSliceEntries_ ) VULKAN_HPP_NOEXCEPT
{
naluSliceEntryCount = static_cast<uint32_t>( naluSliceEntries_.size() );
pNaluSliceEntries = naluSliceEntries_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264PictureInfoKHR & setPStdPictureInfo( const StdVideoEncodeH264PictureInfo * pStdPictureInfo_ ) VULKAN_HPP_NOEXCEPT
{
pStdPictureInfo = pStdPictureInfo_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264PictureInfoKHR & setGeneratePrefixNalu( VULKAN_HPP_NAMESPACE::Bool32 generatePrefixNalu_ ) VULKAN_HPP_NOEXCEPT
{
generatePrefixNalu = generatePrefixNalu_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoEncodeH264PictureInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeH264PictureInfoKHR*>( this );
}
operator VkVideoEncodeH264PictureInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeH264PictureInfoKHR*>( this );
}
operator VkVideoEncodeH264PictureInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoEncodeH264PictureInfoKHR*>( this );
}
operator VkVideoEncodeH264PictureInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoEncodeH264PictureInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceInfoKHR * const &, const StdVideoEncodeH264PictureInfo * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, naluSliceEntryCount, pNaluSliceEntries, pStdPictureInfo, generatePrefixNalu );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoEncodeH264PictureInfoKHR const & ) const = default;
#else
bool operator==( VideoEncodeH264PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( naluSliceEntryCount == rhs.naluSliceEntryCount )
&& ( pNaluSliceEntries == rhs.pNaluSliceEntries )
&& ( pStdPictureInfo == rhs.pStdPictureInfo )
&& ( generatePrefixNalu == rhs.generatePrefixNalu );
#endif
}
bool operator!=( VideoEncodeH264PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264PictureInfoKHR;
const void * pNext = {};
uint32_t naluSliceEntryCount = {};
const VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceInfoKHR * pNaluSliceEntries = {};
const StdVideoEncodeH264PictureInfo * pStdPictureInfo = {};
VULKAN_HPP_NAMESPACE::Bool32 generatePrefixNalu = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoEncodeH264PictureInfoKHR>
{
using Type = VideoEncodeH264PictureInfoKHR;
};
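// Usage sketch (editorial comment, not part of the generated header): the
// enhanced-mode setNaluSliceEntries overload keeps naluSliceEntryCount and
// pNaluSliceEntries in sync from a single ArrayProxy argument. Names and values
// are illustrative; assumes the default "vk" namespace.
//
//   StdVideoEncodeH264PictureInfo stdPictureInfo = {};               // hypothetical, filled elsewhere
//   std::array<vk::VideoEncodeH264NaluSliceInfoKHR, 1> slices = {};  // e.g. the slice info sketched above
//   vk::VideoEncodeH264PictureInfoKHR pictureInfo =
//     vk::VideoEncodeH264PictureInfoKHR{}
//       .setNaluSliceEntries( slices )          // sets count and pointer together
//       .setPStdPictureInfo( &stdPictureInfo )
//       .setGeneratePrefixNalu( VK_FALSE );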
// wrapper struct for struct VkVideoEncodeH264ProfileInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH264ProfileInfoKHR.html
struct VideoEncodeH264ProfileInfoKHR
{
using NativeType = VkVideoEncodeH264ProfileInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264ProfileInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoEncodeH264ProfileInfoKHR(StdVideoH264ProfileIdc stdProfileIdc_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, stdProfileIdc{ stdProfileIdc_ }
{}
VULKAN_HPP_CONSTEXPR VideoEncodeH264ProfileInfoKHR( VideoEncodeH264ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeH264ProfileInfoKHR( VkVideoEncodeH264ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeH264ProfileInfoKHR( *reinterpret_cast<VideoEncodeH264ProfileInfoKHR const *>( &rhs ) )
{}
VideoEncodeH264ProfileInfoKHR & operator=( VideoEncodeH264ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoEncodeH264ProfileInfoKHR & operator=( VkVideoEncodeH264ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264ProfileInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264ProfileInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264ProfileInfoKHR & setStdProfileIdc( StdVideoH264ProfileIdc stdProfileIdc_ ) VULKAN_HPP_NOEXCEPT
{
stdProfileIdc = stdProfileIdc_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoEncodeH264ProfileInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeH264ProfileInfoKHR*>( this );
}
operator VkVideoEncodeH264ProfileInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeH264ProfileInfoKHR*>( this );
}
operator VkVideoEncodeH264ProfileInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoEncodeH264ProfileInfoKHR*>( this );
}
operator VkVideoEncodeH264ProfileInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoEncodeH264ProfileInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, StdVideoH264ProfileIdc const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, stdProfileIdc );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
std::strong_ordering operator<=>( VideoEncodeH264ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
if ( auto cmp = memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH264ProfileIdc ) ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
return std::strong_ordering::equivalent;
}
#endif
bool operator==( VideoEncodeH264ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH264ProfileIdc ) ) == 0 );
}
bool operator!=( VideoEncodeH264ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264ProfileInfoKHR;
const void * pNext = {};
StdVideoH264ProfileIdc stdProfileIdc = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoEncodeH264ProfileInfoKHR>
{
using Type = VideoEncodeH264ProfileInfoKHR;
};
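// Usage sketch (editorial comment, not part of the generated header): the H.264
// profile struct is chained through pNext of the generic vk::VideoProfileInfoKHR
// (remaining profile fields omitted here). STD_VIDEO_H264_PROFILE_IDC_HIGH comes
// from the vk_video headers; assumes the default "vk" namespace.
//
//   vk::VideoEncodeH264ProfileInfoKHR h264Profile =
//     vk::VideoEncodeH264ProfileInfoKHR{}
//       .setStdProfileIdc( STD_VIDEO_H264_PROFILE_IDC_HIGH );
//   vk::VideoProfileInfoKHR profileInfo =
//     vk::VideoProfileInfoKHR{}
//       .setVideoCodecOperation( vk::VideoCodecOperationFlagBitsKHR::eEncodeH264 )
//       .setPNext( &h264Profile );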
// wrapper struct for struct VkVideoEncodeH264QpKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH264QpKHR.html
struct VideoEncodeH264QpKHR
{
using NativeType = VkVideoEncodeH264QpKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoEncodeH264QpKHR(int32_t qpI_ = {}, int32_t qpP_ = {}, int32_t qpB_ = {}) VULKAN_HPP_NOEXCEPT
: qpI{ qpI_ }, qpP{ qpP_ }, qpB{ qpB_ }
{}
VULKAN_HPP_CONSTEXPR VideoEncodeH264QpKHR( VideoEncodeH264QpKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeH264QpKHR( VkVideoEncodeH264QpKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeH264QpKHR( *reinterpret_cast<VideoEncodeH264QpKHR const *>( &rhs ) )
{}
VideoEncodeH264QpKHR & operator=( VideoEncodeH264QpKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoEncodeH264QpKHR & operator=( VkVideoEncodeH264QpKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264QpKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264QpKHR & setQpI( int32_t qpI_ ) VULKAN_HPP_NOEXCEPT
{
qpI = qpI_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264QpKHR & setQpP( int32_t qpP_ ) VULKAN_HPP_NOEXCEPT
{
qpP = qpP_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264QpKHR & setQpB( int32_t qpB_ ) VULKAN_HPP_NOEXCEPT
{
qpB = qpB_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoEncodeH264QpKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeH264QpKHR*>( this );
}
operator VkVideoEncodeH264QpKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeH264QpKHR*>( this );
}
operator VkVideoEncodeH264QpKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoEncodeH264QpKHR*>( this );
}
operator VkVideoEncodeH264QpKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoEncodeH264QpKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<int32_t const &, int32_t const &, int32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( qpI, qpP, qpB );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoEncodeH264QpKHR const & ) const = default;
#else
bool operator==( VideoEncodeH264QpKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( qpI == rhs.qpI )
&& ( qpP == rhs.qpP )
&& ( qpB == rhs.qpB );
#endif
}
bool operator!=( VideoEncodeH264QpKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
int32_t qpI = {};
int32_t qpP = {};
int32_t qpB = {};
};
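// Usage sketch (editorial comment, not part of the generated header): a plain
// per-picture-type QP triple, used for example as the min/max bounds in
// VideoEncodeH264RateControlLayerInfoKHR below; values are illustrative.
//
//   vk::VideoEncodeH264QpKHR minQp( /*qpI_*/ 18, /*qpP_*/ 20, /*qpB_*/ 22 );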
// wrapper struct for struct VkVideoEncodeH264QualityLevelPropertiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH264QualityLevelPropertiesKHR.html
struct VideoEncodeH264QualityLevelPropertiesKHR
{
using NativeType = VkVideoEncodeH264QualityLevelPropertiesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264QualityLevelPropertiesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoEncodeH264QualityLevelPropertiesKHR(VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlFlagsKHR preferredRateControlFlags_ = {}, uint32_t preferredGopFrameCount_ = {}, uint32_t preferredIdrPeriod_ = {}, uint32_t preferredConsecutiveBFrameCount_ = {}, uint32_t preferredTemporalLayerCount_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH264QpKHR preferredConstantQp_ = {}, uint32_t preferredMaxL0ReferenceCount_ = {}, uint32_t preferredMaxL1ReferenceCount_ = {}, VULKAN_HPP_NAMESPACE::Bool32 preferredStdEntropyCodingModeFlag_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, preferredRateControlFlags{ preferredRateControlFlags_ }, preferredGopFrameCount{ preferredGopFrameCount_ }, preferredIdrPeriod{ preferredIdrPeriod_ }, preferredConsecutiveBFrameCount{ preferredConsecutiveBFrameCount_ }, preferredTemporalLayerCount{ preferredTemporalLayerCount_ }, preferredConstantQp{ preferredConstantQp_ }, preferredMaxL0ReferenceCount{ preferredMaxL0ReferenceCount_ }, preferredMaxL1ReferenceCount{ preferredMaxL1ReferenceCount_ }, preferredStdEntropyCodingModeFlag{ preferredStdEntropyCodingModeFlag_ }
{}
VULKAN_HPP_CONSTEXPR VideoEncodeH264QualityLevelPropertiesKHR( VideoEncodeH264QualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeH264QualityLevelPropertiesKHR( VkVideoEncodeH264QualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeH264QualityLevelPropertiesKHR( *reinterpret_cast<VideoEncodeH264QualityLevelPropertiesKHR const *>( &rhs ) )
{}
VideoEncodeH264QualityLevelPropertiesKHR & operator=( VideoEncodeH264QualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoEncodeH264QualityLevelPropertiesKHR & operator=( VkVideoEncodeH264QualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264QualityLevelPropertiesKHR const *>( &rhs );
return *this;
}
operator VkVideoEncodeH264QualityLevelPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeH264QualityLevelPropertiesKHR*>( this );
}
operator VkVideoEncodeH264QualityLevelPropertiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeH264QualityLevelPropertiesKHR*>( this );
}
operator VkVideoEncodeH264QualityLevelPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoEncodeH264QualityLevelPropertiesKHR*>( this );
}
operator VkVideoEncodeH264QualityLevelPropertiesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoEncodeH264QualityLevelPropertiesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlFlagsKHR const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::VideoEncodeH264QpKHR const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, preferredRateControlFlags, preferredGopFrameCount, preferredIdrPeriod, preferredConsecutiveBFrameCount, preferredTemporalLayerCount, preferredConstantQp, preferredMaxL0ReferenceCount, preferredMaxL1ReferenceCount, preferredStdEntropyCodingModeFlag );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoEncodeH264QualityLevelPropertiesKHR const & ) const = default;
#else
bool operator==( VideoEncodeH264QualityLevelPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( preferredRateControlFlags == rhs.preferredRateControlFlags )
&& ( preferredGopFrameCount == rhs.preferredGopFrameCount )
&& ( preferredIdrPeriod == rhs.preferredIdrPeriod )
&& ( preferredConsecutiveBFrameCount == rhs.preferredConsecutiveBFrameCount )
&& ( preferredTemporalLayerCount == rhs.preferredTemporalLayerCount )
&& ( preferredConstantQp == rhs.preferredConstantQp )
&& ( preferredMaxL0ReferenceCount == rhs.preferredMaxL0ReferenceCount )
&& ( preferredMaxL1ReferenceCount == rhs.preferredMaxL1ReferenceCount )
&& ( preferredStdEntropyCodingModeFlag == rhs.preferredStdEntropyCodingModeFlag );
#endif
}
bool operator!=( VideoEncodeH264QualityLevelPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264QualityLevelPropertiesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlFlagsKHR preferredRateControlFlags = {};
uint32_t preferredGopFrameCount = {};
uint32_t preferredIdrPeriod = {};
uint32_t preferredConsecutiveBFrameCount = {};
uint32_t preferredTemporalLayerCount = {};
VULKAN_HPP_NAMESPACE::VideoEncodeH264QpKHR preferredConstantQp = {};
uint32_t preferredMaxL0ReferenceCount = {};
uint32_t preferredMaxL1ReferenceCount = {};
VULKAN_HPP_NAMESPACE::Bool32 preferredStdEntropyCodingModeFlag = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoEncodeH264QualityLevelPropertiesKHR>
{
using Type = VideoEncodeH264QualityLevelPropertiesKHR;
};
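// Usage sketch (editorial comment, not part of the generated header): this is a
// returned-only struct; a common pattern is to hang it off the pNext chain of
// vk::VideoEncodeQualityLevelPropertiesKHR before the VK_KHR_video_encode_queue
// quality-level query fills it in (see the registry link above). Assumes the
// default "vk" namespace.
//
//   vk::VideoEncodeH264QualityLevelPropertiesKHR h264QualityProps{};
//   vk::VideoEncodeQualityLevelPropertiesKHR qualityProps{};
//   qualityProps.pNext = &h264QualityProps;
//   // ... pass &qualityProps to vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR,
//   // then read back the implementation's preferences, e.g.:
//   uint32_t preferredGop = h264QualityProps.preferredGopFrameCount;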
// wrapper struct for struct VkVideoEncodeH264QuantizationMapCapabilitiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH264QuantizationMapCapabilitiesKHR.html
struct VideoEncodeH264QuantizationMapCapabilitiesKHR
{
using NativeType = VkVideoEncodeH264QuantizationMapCapabilitiesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264QuantizationMapCapabilitiesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoEncodeH264QuantizationMapCapabilitiesKHR(int32_t minQpDelta_ = {}, int32_t maxQpDelta_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, minQpDelta{ minQpDelta_ }, maxQpDelta{ maxQpDelta_ }
{}
VULKAN_HPP_CONSTEXPR VideoEncodeH264QuantizationMapCapabilitiesKHR( VideoEncodeH264QuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeH264QuantizationMapCapabilitiesKHR( VkVideoEncodeH264QuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeH264QuantizationMapCapabilitiesKHR( *reinterpret_cast<VideoEncodeH264QuantizationMapCapabilitiesKHR const *>( &rhs ) )
{}
VideoEncodeH264QuantizationMapCapabilitiesKHR & operator=( VideoEncodeH264QuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoEncodeH264QuantizationMapCapabilitiesKHR & operator=( VkVideoEncodeH264QuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264QuantizationMapCapabilitiesKHR const *>( &rhs );
return *this;
}
operator VkVideoEncodeH264QuantizationMapCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeH264QuantizationMapCapabilitiesKHR*>( this );
}
operator VkVideoEncodeH264QuantizationMapCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeH264QuantizationMapCapabilitiesKHR*>( this );
}
operator VkVideoEncodeH264QuantizationMapCapabilitiesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoEncodeH264QuantizationMapCapabilitiesKHR*>( this );
}
operator VkVideoEncodeH264QuantizationMapCapabilitiesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoEncodeH264QuantizationMapCapabilitiesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, int32_t const &, int32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, minQpDelta, maxQpDelta );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoEncodeH264QuantizationMapCapabilitiesKHR const & ) const = default;
#else
bool operator==( VideoEncodeH264QuantizationMapCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( minQpDelta == rhs.minQpDelta )
&& ( maxQpDelta == rhs.maxQpDelta );
#endif
}
bool operator!=( VideoEncodeH264QuantizationMapCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264QuantizationMapCapabilitiesKHR;
void * pNext = {};
int32_t minQpDelta = {};
int32_t maxQpDelta = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoEncodeH264QuantizationMapCapabilitiesKHR>
{
using Type = VideoEncodeH264QuantizationMapCapabilitiesKHR;
};
// wrapper struct for struct VkVideoEncodeH264RateControlInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH264RateControlInfoKHR.html
struct VideoEncodeH264RateControlInfoKHR
{
using NativeType = VkVideoEncodeH264RateControlInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264RateControlInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoEncodeH264RateControlInfoKHR(VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlFlagsKHR flags_ = {}, uint32_t gopFrameCount_ = {}, uint32_t idrPeriod_ = {}, uint32_t consecutiveBFrameCount_ = {}, uint32_t temporalLayerCount_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, gopFrameCount{ gopFrameCount_ }, idrPeriod{ idrPeriod_ }, consecutiveBFrameCount{ consecutiveBFrameCount_ }, temporalLayerCount{ temporalLayerCount_ }
{}
VULKAN_HPP_CONSTEXPR VideoEncodeH264RateControlInfoKHR( VideoEncodeH264RateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeH264RateControlInfoKHR( VkVideoEncodeH264RateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeH264RateControlInfoKHR( *reinterpret_cast<VideoEncodeH264RateControlInfoKHR const *>( &rhs ) )
{}
VideoEncodeH264RateControlInfoKHR & operator=( VideoEncodeH264RateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoEncodeH264RateControlInfoKHR & operator=( VkVideoEncodeH264RateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoKHR & setGopFrameCount( uint32_t gopFrameCount_ ) VULKAN_HPP_NOEXCEPT
{
gopFrameCount = gopFrameCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoKHR & setIdrPeriod( uint32_t idrPeriod_ ) VULKAN_HPP_NOEXCEPT
{
idrPeriod = idrPeriod_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoKHR & setConsecutiveBFrameCount( uint32_t consecutiveBFrameCount_ ) VULKAN_HPP_NOEXCEPT
{
consecutiveBFrameCount = consecutiveBFrameCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoKHR & setTemporalLayerCount( uint32_t temporalLayerCount_ ) VULKAN_HPP_NOEXCEPT
{
temporalLayerCount = temporalLayerCount_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoEncodeH264RateControlInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeH264RateControlInfoKHR*>( this );
}
operator VkVideoEncodeH264RateControlInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeH264RateControlInfoKHR*>( this );
}
operator VkVideoEncodeH264RateControlInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoEncodeH264RateControlInfoKHR*>( this );
}
operator VkVideoEncodeH264RateControlInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoEncodeH264RateControlInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlFlagsKHR const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, gopFrameCount, idrPeriod, consecutiveBFrameCount, temporalLayerCount );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoEncodeH264RateControlInfoKHR const & ) const = default;
#else
bool operator==( VideoEncodeH264RateControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( gopFrameCount == rhs.gopFrameCount )
&& ( idrPeriod == rhs.idrPeriod )
&& ( consecutiveBFrameCount == rhs.consecutiveBFrameCount )
&& ( temporalLayerCount == rhs.temporalLayerCount );
#endif
}
bool operator!=( VideoEncodeH264RateControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264RateControlInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlFlagsKHR flags = {};
uint32_t gopFrameCount = {};
uint32_t idrPeriod = {};
uint32_t consecutiveBFrameCount = {};
uint32_t temporalLayerCount = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoEncodeH264RateControlInfoKHR>
{
using Type = VideoEncodeH264RateControlInfoKHR;
};
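// Usage sketch (editorial comment, not part of the generated header): a regular
// 16-frame GOP with no B frames and a single temporal layer; typically chained
// through pNext of vk::VideoEncodeRateControlInfoKHR. Values are illustrative;
// assumes the default "vk" namespace.
//
//   vk::VideoEncodeH264RateControlInfoKHR h264RateControl =
//     vk::VideoEncodeH264RateControlInfoKHR{}
//       .setFlags( vk::VideoEncodeH264RateControlFlagBitsKHR::eRegularGop )
//       .setGopFrameCount( 16 )
//       .setIdrPeriod( 16 )
//       .setConsecutiveBFrameCount( 0 )
//       .setTemporalLayerCount( 1 );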
// wrapper struct for struct VkVideoEncodeH264RateControlLayerInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH264RateControlLayerInfoKHR.html
struct VideoEncodeH264RateControlLayerInfoKHR
{
using NativeType = VkVideoEncodeH264RateControlLayerInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264RateControlLayerInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoEncodeH264RateControlLayerInfoKHR(VULKAN_HPP_NAMESPACE::Bool32 useMinQp_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH264QpKHR minQp_ = {}, VULKAN_HPP_NAMESPACE::Bool32 useMaxQp_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH264QpKHR maxQp_ = {}, VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeKHR maxFrameSize_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, useMinQp{ useMinQp_ }, minQp{ minQp_ }, useMaxQp{ useMaxQp_ }, maxQp{ maxQp_ }, useMaxFrameSize{ useMaxFrameSize_ }, maxFrameSize{ maxFrameSize_ }
{}
VULKAN_HPP_CONSTEXPR VideoEncodeH264RateControlLayerInfoKHR( VideoEncodeH264RateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeH264RateControlLayerInfoKHR( VkVideoEncodeH264RateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeH264RateControlLayerInfoKHR( *reinterpret_cast<VideoEncodeH264RateControlLayerInfoKHR const *>( &rhs ) )
{}
VideoEncodeH264RateControlLayerInfoKHR & operator=( VideoEncodeH264RateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoEncodeH264RateControlLayerInfoKHR & operator=( VkVideoEncodeH264RateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlLayerInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoKHR & setUseMinQp( VULKAN_HPP_NAMESPACE::Bool32 useMinQp_ ) VULKAN_HPP_NOEXCEPT
{
useMinQp = useMinQp_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoKHR & setMinQp( VULKAN_HPP_NAMESPACE::VideoEncodeH264QpKHR const & minQp_ ) VULKAN_HPP_NOEXCEPT
{
minQp = minQp_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoKHR & setUseMaxQp( VULKAN_HPP_NAMESPACE::Bool32 useMaxQp_ ) VULKAN_HPP_NOEXCEPT
{
useMaxQp = useMaxQp_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoKHR & setMaxQp( VULKAN_HPP_NAMESPACE::VideoEncodeH264QpKHR const & maxQp_ ) VULKAN_HPP_NOEXCEPT
{
maxQp = maxQp_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoKHR & setUseMaxFrameSize( VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize_ ) VULKAN_HPP_NOEXCEPT
{
useMaxFrameSize = useMaxFrameSize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoKHR & setMaxFrameSize( VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeKHR const & maxFrameSize_ ) VULKAN_HPP_NOEXCEPT
{
maxFrameSize = maxFrameSize_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoEncodeH264RateControlLayerInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeH264RateControlLayerInfoKHR*>( this );
}
operator VkVideoEncodeH264RateControlLayerInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeH264RateControlLayerInfoKHR*>( this );
}
operator VkVideoEncodeH264RateControlLayerInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoEncodeH264RateControlLayerInfoKHR*>( this );
}
operator VkVideoEncodeH264RateControlLayerInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoEncodeH264RateControlLayerInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::VideoEncodeH264QpKHR const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::VideoEncodeH264QpKHR const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeKHR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, useMinQp, minQp, useMaxQp, maxQp, useMaxFrameSize, maxFrameSize );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoEncodeH264RateControlLayerInfoKHR const & ) const = default;
#else
bool operator==( VideoEncodeH264RateControlLayerInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( useMinQp == rhs.useMinQp )
&& ( minQp == rhs.minQp )
&& ( useMaxQp == rhs.useMaxQp )
&& ( maxQp == rhs.maxQp )
&& ( useMaxFrameSize == rhs.useMaxFrameSize )
&& ( maxFrameSize == rhs.maxFrameSize );
#endif
}
bool operator!=( VideoEncodeH264RateControlLayerInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264RateControlLayerInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 useMinQp = {};
VULKAN_HPP_NAMESPACE::VideoEncodeH264QpKHR minQp = {};
VULKAN_HPP_NAMESPACE::Bool32 useMaxQp = {};
VULKAN_HPP_NAMESPACE::VideoEncodeH264QpKHR maxQp = {};
VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize = {};
VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeKHR maxFrameSize = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoEncodeH264RateControlLayerInfoKHR>
{
using Type = VideoEncodeH264RateControlLayerInfoKHR;
};
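// Usage sketch (editorial comment, not part of the generated header): per-layer
// QP clamping; each use* flag gates its companion value. QP bounds are
// illustrative; assumes the default "vk" namespace.
//
//   vk::VideoEncodeH264RateControlLayerInfoKHR h264Layer =
//     vk::VideoEncodeH264RateControlLayerInfoKHR{}
//       .setUseMinQp( VK_TRUE )
//       .setMinQp( vk::VideoEncodeH264QpKHR( 18, 20, 22 ) )
//       .setUseMaxQp( VK_TRUE )
//       .setMaxQp( vk::VideoEncodeH264QpKHR( 42, 44, 46 ) );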
// wrapper struct for struct VkVideoEncodeH264SessionCreateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH264SessionCreateInfoKHR.html
struct VideoEncodeH264SessionCreateInfoKHR
{
using NativeType = VkVideoEncodeH264SessionCreateInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264SessionCreateInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionCreateInfoKHR(VULKAN_HPP_NAMESPACE::Bool32 useMaxLevelIdc_ = {}, StdVideoH264LevelIdc maxLevelIdc_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, useMaxLevelIdc{ useMaxLevelIdc_ }, maxLevelIdc{ maxLevelIdc_ }
{}
VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionCreateInfoKHR( VideoEncodeH264SessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeH264SessionCreateInfoKHR( VkVideoEncodeH264SessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeH264SessionCreateInfoKHR( *reinterpret_cast<VideoEncodeH264SessionCreateInfoKHR const *>( &rhs ) )
{}
VideoEncodeH264SessionCreateInfoKHR & operator=( VideoEncodeH264SessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoEncodeH264SessionCreateInfoKHR & operator=( VkVideoEncodeH264SessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionCreateInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionCreateInfoKHR & setUseMaxLevelIdc( VULKAN_HPP_NAMESPACE::Bool32 useMaxLevelIdc_ ) VULKAN_HPP_NOEXCEPT
{
useMaxLevelIdc = useMaxLevelIdc_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionCreateInfoKHR & setMaxLevelIdc( StdVideoH264LevelIdc maxLevelIdc_ ) VULKAN_HPP_NOEXCEPT
{
maxLevelIdc = maxLevelIdc_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoEncodeH264SessionCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeH264SessionCreateInfoKHR*>( this );
}
operator VkVideoEncodeH264SessionCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeH264SessionCreateInfoKHR*>( this );
}
operator VkVideoEncodeH264SessionCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoEncodeH264SessionCreateInfoKHR*>( this );
}
operator VkVideoEncodeH264SessionCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoEncodeH264SessionCreateInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, StdVideoH264LevelIdc const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, useMaxLevelIdc, maxLevelIdc );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
std::strong_ordering operator<=>( VideoEncodeH264SessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
if ( auto cmp = useMaxLevelIdc <=> rhs.useMaxLevelIdc; cmp != 0 ) return cmp;
if ( auto cmp = memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH264LevelIdc ) ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
return std::strong_ordering::equivalent;
}
#endif
bool operator==( VideoEncodeH264SessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( useMaxLevelIdc == rhs.useMaxLevelIdc )
&& ( memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH264LevelIdc ) ) == 0 );
}
bool operator!=( VideoEncodeH264SessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264SessionCreateInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 useMaxLevelIdc = {};
StdVideoH264LevelIdc maxLevelIdc = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoEncodeH264SessionCreateInfoKHR>
{
using Type = VideoEncodeH264SessionCreateInfoKHR;
};
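// Usage sketch (editorial comment, not part of the generated header): capping the
// session at level 4.1 (STD_VIDEO_H264_LEVEL_IDC_4_1 comes from the vk_video
// headers); typically chained through pNext of vk::VideoSessionCreateInfoKHR.
// Assumes the default "vk" namespace.
//
//   vk::VideoEncodeH264SessionCreateInfoKHR h264SessionCreate =
//     vk::VideoEncodeH264SessionCreateInfoKHR{}
//       .setUseMaxLevelIdc( VK_TRUE )
//       .setMaxLevelIdc( STD_VIDEO_H264_LEVEL_IDC_4_1 );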
// wrapper struct for struct VkVideoEncodeH264SessionParametersAddInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH264SessionParametersAddInfoKHR.html
struct VideoEncodeH264SessionParametersAddInfoKHR
{
using NativeType = VkVideoEncodeH264SessionParametersAddInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264SessionParametersAddInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersAddInfoKHR(uint32_t stdSPSCount_ = {}, const StdVideoH264SequenceParameterSet * pStdSPSs_ = {}, uint32_t stdPPSCount_ = {}, const StdVideoH264PictureParameterSet * pStdPPSs_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, stdSPSCount{ stdSPSCount_ }, pStdSPSs{ pStdSPSs_ }, stdPPSCount{ stdPPSCount_ }, pStdPPSs{ pStdPPSs_ }
{}
VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersAddInfoKHR( VideoEncodeH264SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeH264SessionParametersAddInfoKHR( VkVideoEncodeH264SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeH264SessionParametersAddInfoKHR( *reinterpret_cast<VideoEncodeH264SessionParametersAddInfoKHR const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
VideoEncodeH264SessionParametersAddInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH264SequenceParameterSet> const & stdSPSs_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH264PictureParameterSet> const & stdPPSs_ = {}, const void * pNext_ = nullptr )
: pNext( pNext_ ), stdSPSCount( static_cast<uint32_t>( stdSPSs_.size() ) ), pStdSPSs( stdSPSs_.data() ), stdPPSCount( static_cast<uint32_t>( stdPPSs_.size() ) ), pStdPPSs( stdPPSs_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VideoEncodeH264SessionParametersAddInfoKHR & operator=( VideoEncodeH264SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoEncodeH264SessionParametersAddInfoKHR & operator=( VkVideoEncodeH264SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoKHR & setStdSPSCount( uint32_t stdSPSCount_ ) VULKAN_HPP_NOEXCEPT
{
stdSPSCount = stdSPSCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoKHR & setPStdSPSs( const StdVideoH264SequenceParameterSet * pStdSPSs_ ) VULKAN_HPP_NOEXCEPT
{
pStdSPSs = pStdSPSs_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
VideoEncodeH264SessionParametersAddInfoKHR & setStdSPSs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH264SequenceParameterSet> const & stdSPSs_ ) VULKAN_HPP_NOEXCEPT
{
stdSPSCount = static_cast<uint32_t>( stdSPSs_.size() );
pStdSPSs = stdSPSs_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoKHR & setStdPPSCount( uint32_t stdPPSCount_ ) VULKAN_HPP_NOEXCEPT
{
stdPPSCount = stdPPSCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoKHR & setPStdPPSs( const StdVideoH264PictureParameterSet * pStdPPSs_ ) VULKAN_HPP_NOEXCEPT
{
pStdPPSs = pStdPPSs_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
VideoEncodeH264SessionParametersAddInfoKHR & setStdPPSs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH264PictureParameterSet> const & stdPPSs_ ) VULKAN_HPP_NOEXCEPT
{
stdPPSCount = static_cast<uint32_t>( stdPPSs_.size() );
pStdPPSs = stdPPSs_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoEncodeH264SessionParametersAddInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeH264SessionParametersAddInfoKHR*>( this );
}
operator VkVideoEncodeH264SessionParametersAddInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeH264SessionParametersAddInfoKHR*>( this );
}
operator VkVideoEncodeH264SessionParametersAddInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoEncodeH264SessionParametersAddInfoKHR*>( this );
}
operator VkVideoEncodeH264SessionParametersAddInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoEncodeH264SessionParametersAddInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const StdVideoH264SequenceParameterSet * const &, uint32_t const &, const StdVideoH264PictureParameterSet * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, stdSPSCount, pStdSPSs, stdPPSCount, pStdPPSs );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoEncodeH264SessionParametersAddInfoKHR const & ) const = default;
#else
bool operator==( VideoEncodeH264SessionParametersAddInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( stdSPSCount == rhs.stdSPSCount )
&& ( pStdSPSs == rhs.pStdSPSs )
&& ( stdPPSCount == rhs.stdPPSCount )
&& ( pStdPPSs == rhs.pStdPPSs );
#endif
}
bool operator!=( VideoEncodeH264SessionParametersAddInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264SessionParametersAddInfoKHR;
const void * pNext = {};
uint32_t stdSPSCount = {};
const StdVideoH264SequenceParameterSet * pStdSPSs = {};
uint32_t stdPPSCount = {};
const StdVideoH264PictureParameterSet * pStdPPSs = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoEncodeH264SessionParametersAddInfoKHR>
{
using Type = VideoEncodeH264SessionParametersAddInfoKHR;
};
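// Usage sketch (editorial comment, not part of the generated header): the
// enhanced-mode setStdSPSs / setStdPPSs overloads keep each count/pointer pair
// in sync; a single lvalue is accepted as a one-element range. The parameter
// sets are hypothetical and filled elsewhere; assumes the default "vk" namespace.
//
//   StdVideoH264SequenceParameterSet stdSPS = {};
//   StdVideoH264PictureParameterSet  stdPPS = {};
//   vk::VideoEncodeH264SessionParametersAddInfoKHR addInfo =
//     vk::VideoEncodeH264SessionParametersAddInfoKHR{}
//       .setStdSPSs( stdSPS )
//       .setStdPPSs( stdPPS );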
// wrapper struct for struct VkVideoEncodeH264SessionParametersCreateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH264SessionParametersCreateInfoKHR.html
struct VideoEncodeH264SessionParametersCreateInfoKHR
{
using NativeType = VkVideoEncodeH264SessionParametersCreateInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264SessionParametersCreateInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersCreateInfoKHR(uint32_t maxStdSPSCount_ = {}, uint32_t maxStdPPSCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoKHR * pParametersAddInfo_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, maxStdSPSCount{ maxStdSPSCount_ }, maxStdPPSCount{ maxStdPPSCount_ }, pParametersAddInfo{ pParametersAddInfo_ }
{}
VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersCreateInfoKHR( VideoEncodeH264SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeH264SessionParametersCreateInfoKHR( VkVideoEncodeH264SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeH264SessionParametersCreateInfoKHR( *reinterpret_cast<VideoEncodeH264SessionParametersCreateInfoKHR const *>( &rhs ) )
{}
VideoEncodeH264SessionParametersCreateInfoKHR & operator=( VideoEncodeH264SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoEncodeH264SessionParametersCreateInfoKHR & operator=( VkVideoEncodeH264SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersCreateInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersCreateInfoKHR & setMaxStdSPSCount( uint32_t maxStdSPSCount_ ) VULKAN_HPP_NOEXCEPT
{
maxStdSPSCount = maxStdSPSCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersCreateInfoKHR & setMaxStdPPSCount( uint32_t maxStdPPSCount_ ) VULKAN_HPP_NOEXCEPT
{
maxStdPPSCount = maxStdPPSCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersCreateInfoKHR & setPParametersAddInfo( const VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoKHR * pParametersAddInfo_ ) VULKAN_HPP_NOEXCEPT
{
pParametersAddInfo = pParametersAddInfo_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoEncodeH264SessionParametersCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeH264SessionParametersCreateInfoKHR*>( this );
}
operator VkVideoEncodeH264SessionParametersCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeH264SessionParametersCreateInfoKHR*>( this );
}
operator VkVideoEncodeH264SessionParametersCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoEncodeH264SessionParametersCreateInfoKHR*>( this );
}
operator VkVideoEncodeH264SessionParametersCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoEncodeH264SessionParametersCreateInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoKHR * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, maxStdSPSCount, maxStdPPSCount, pParametersAddInfo );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoEncodeH264SessionParametersCreateInfoKHR const & ) const = default;
#else
bool operator==( VideoEncodeH264SessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( maxStdSPSCount == rhs.maxStdSPSCount )
&& ( maxStdPPSCount == rhs.maxStdPPSCount )
&& ( pParametersAddInfo == rhs.pParametersAddInfo );
#endif
}
bool operator!=( VideoEncodeH264SessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264SessionParametersCreateInfoKHR;
const void * pNext = {};
uint32_t maxStdSPSCount = {};
uint32_t maxStdPPSCount = {};
const VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoKHR * pParametersAddInfo = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoEncodeH264SessionParametersCreateInfoKHR>
{
using Type = VideoEncodeH264SessionParametersCreateInfoKHR;
};
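// Usage sketch (editorial comment, not part of the generated header): capacities
// plus an optional initial batch of parameter sets; typically chained through
// pNext of vk::VideoSessionParametersCreateInfoKHR. "addInfo" stands in for an
// add-info struct as sketched above; assumes the default "vk" namespace.
//
//   vk::VideoEncodeH264SessionParametersAddInfoKHR addInfo = {};  // see sketch above
//   vk::VideoEncodeH264SessionParametersCreateInfoKHR h264ParamsCreate =
//     vk::VideoEncodeH264SessionParametersCreateInfoKHR{}
//       .setMaxStdSPSCount( 1 )
//       .setMaxStdPPSCount( 1 )
//       .setPParametersAddInfo( &addInfo );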
// wrapper struct for struct VkVideoEncodeH264SessionParametersFeedbackInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH264SessionParametersFeedbackInfoKHR.html
struct VideoEncodeH264SessionParametersFeedbackInfoKHR
{
using NativeType = VkVideoEncodeH264SessionParametersFeedbackInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264SessionParametersFeedbackInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersFeedbackInfoKHR(VULKAN_HPP_NAMESPACE::Bool32 hasStdSPSOverrides_ = {}, VULKAN_HPP_NAMESPACE::Bool32 hasStdPPSOverrides_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, hasStdSPSOverrides{ hasStdSPSOverrides_ }, hasStdPPSOverrides{ hasStdPPSOverrides_ }
{}
VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersFeedbackInfoKHR( VideoEncodeH264SessionParametersFeedbackInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeH264SessionParametersFeedbackInfoKHR( VkVideoEncodeH264SessionParametersFeedbackInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeH264SessionParametersFeedbackInfoKHR( *reinterpret_cast<VideoEncodeH264SessionParametersFeedbackInfoKHR const *>( &rhs ) )
{}
VideoEncodeH264SessionParametersFeedbackInfoKHR & operator=( VideoEncodeH264SessionParametersFeedbackInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoEncodeH264SessionParametersFeedbackInfoKHR & operator=( VkVideoEncodeH264SessionParametersFeedbackInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersFeedbackInfoKHR const *>( &rhs );
return *this;
}
operator VkVideoEncodeH264SessionParametersFeedbackInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeH264SessionParametersFeedbackInfoKHR*>( this );
}
operator VkVideoEncodeH264SessionParametersFeedbackInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeH264SessionParametersFeedbackInfoKHR*>( this );
}
operator VkVideoEncodeH264SessionParametersFeedbackInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoEncodeH264SessionParametersFeedbackInfoKHR*>( this );
}
operator VkVideoEncodeH264SessionParametersFeedbackInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoEncodeH264SessionParametersFeedbackInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, hasStdSPSOverrides, hasStdPPSOverrides );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoEncodeH264SessionParametersFeedbackInfoKHR const & ) const = default;
#else
bool operator==( VideoEncodeH264SessionParametersFeedbackInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( hasStdSPSOverrides == rhs.hasStdSPSOverrides )
&& ( hasStdPPSOverrides == rhs.hasStdPPSOverrides );
#endif
}
bool operator!=( VideoEncodeH264SessionParametersFeedbackInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264SessionParametersFeedbackInfoKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 hasStdSPSOverrides = {};
VULKAN_HPP_NAMESPACE::Bool32 hasStdPPSOverrides = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoEncodeH264SessionParametersFeedbackInfoKHR>
{
using Type = VideoEncodeH264SessionParametersFeedbackInfoKHR;
};
// wrapper struct for struct VkVideoEncodeH264SessionParametersGetInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH264SessionParametersGetInfoKHR.html
struct VideoEncodeH264SessionParametersGetInfoKHR
{
using NativeType = VkVideoEncodeH264SessionParametersGetInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264SessionParametersGetInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersGetInfoKHR(VULKAN_HPP_NAMESPACE::Bool32 writeStdSPS_ = {}, VULKAN_HPP_NAMESPACE::Bool32 writeStdPPS_ = {}, uint32_t stdSPSId_ = {}, uint32_t stdPPSId_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, writeStdSPS{ writeStdSPS_ }, writeStdPPS{ writeStdPPS_ }, stdSPSId{ stdSPSId_ }, stdPPSId{ stdPPSId_ }
{}
VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersGetInfoKHR( VideoEncodeH264SessionParametersGetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeH264SessionParametersGetInfoKHR( VkVideoEncodeH264SessionParametersGetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeH264SessionParametersGetInfoKHR( *reinterpret_cast<VideoEncodeH264SessionParametersGetInfoKHR const *>( &rhs ) )
{}
VideoEncodeH264SessionParametersGetInfoKHR & operator=( VideoEncodeH264SessionParametersGetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoEncodeH264SessionParametersGetInfoKHR & operator=( VkVideoEncodeH264SessionParametersGetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersGetInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersGetInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersGetInfoKHR & setWriteStdSPS( VULKAN_HPP_NAMESPACE::Bool32 writeStdSPS_ ) VULKAN_HPP_NOEXCEPT
{
writeStdSPS = writeStdSPS_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersGetInfoKHR & setWriteStdPPS( VULKAN_HPP_NAMESPACE::Bool32 writeStdPPS_ ) VULKAN_HPP_NOEXCEPT
{
writeStdPPS = writeStdPPS_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersGetInfoKHR & setStdSPSId( uint32_t stdSPSId_ ) VULKAN_HPP_NOEXCEPT
{
stdSPSId = stdSPSId_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersGetInfoKHR & setStdPPSId( uint32_t stdPPSId_ ) VULKAN_HPP_NOEXCEPT
{
stdPPSId = stdPPSId_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoEncodeH264SessionParametersGetInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeH264SessionParametersGetInfoKHR*>( this );
}
operator VkVideoEncodeH264SessionParametersGetInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeH264SessionParametersGetInfoKHR*>( this );
}
operator VkVideoEncodeH264SessionParametersGetInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoEncodeH264SessionParametersGetInfoKHR*>( this );
}
operator VkVideoEncodeH264SessionParametersGetInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoEncodeH264SessionParametersGetInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, writeStdSPS, writeStdPPS, stdSPSId, stdPPSId );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoEncodeH264SessionParametersGetInfoKHR const & ) const = default;
#else
bool operator==( VideoEncodeH264SessionParametersGetInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( writeStdSPS == rhs.writeStdSPS )
&& ( writeStdPPS == rhs.writeStdPPS )
&& ( stdSPSId == rhs.stdSPSId )
&& ( stdPPSId == rhs.stdPPSId );
#endif
}
bool operator!=( VideoEncodeH264SessionParametersGetInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264SessionParametersGetInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 writeStdSPS = {};
VULKAN_HPP_NAMESPACE::Bool32 writeStdPPS = {};
uint32_t stdSPSId = {};
uint32_t stdPPSId = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoEncodeH264SessionParametersGetInfoKHR>
{
using Type = VideoEncodeH264SessionParametersGetInfoKHR;
};
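// A minimal usage sketch (editorial, hedged): request the encoded SPS/PPS with IDs 0 by
// chaining this struct behind the codec-independent VideoEncodeSessionParametersGetInfoKHR
// before calling Device::getEncodedVideoSessionParametersKHR.
//   auto h264GetInfo = VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersGetInfoKHR{}
//                        .setWriteStdSPS( VK_TRUE )
//                        .setWriteStdPPS( VK_TRUE )
//                        .setStdSPSId( 0 )
//                        .setStdPPSId( 0 );
//   VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR getInfo{};
//   getInfo.pNext = &h264GetInfo;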
// wrapper struct for struct VkVideoEncodeH265CapabilitiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH265CapabilitiesKHR.html
struct VideoEncodeH265CapabilitiesKHR
{
using NativeType = VkVideoEncodeH265CapabilitiesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265CapabilitiesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoEncodeH265CapabilitiesKHR(VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilityFlagsKHR flags_ = {}, StdVideoH265LevelIdc maxLevelIdc_ = {}, uint32_t maxSliceSegmentCount_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxTiles_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH265CtbSizeFlagsKHR ctbSizes_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH265TransformBlockSizeFlagsKHR transformBlockSizes_ = {}, uint32_t maxPPictureL0ReferenceCount_ = {}, uint32_t maxBPictureL0ReferenceCount_ = {}, uint32_t maxL1ReferenceCount_ = {}, uint32_t maxSubLayerCount_ = {}, VULKAN_HPP_NAMESPACE::Bool32 expectDyadicTemporalSubLayerPattern_ = {}, int32_t minQp_ = {}, int32_t maxQp_ = {}, VULKAN_HPP_NAMESPACE::Bool32 prefersGopRemainingFrames_ = {}, VULKAN_HPP_NAMESPACE::Bool32 requiresGopRemainingFrames_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH265StdFlagsKHR stdSyntaxFlags_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, maxLevelIdc{ maxLevelIdc_ }, maxSliceSegmentCount{ maxSliceSegmentCount_ }, maxTiles{ maxTiles_ }, ctbSizes{ ctbSizes_ }, transformBlockSizes{ transformBlockSizes_ }, maxPPictureL0ReferenceCount{ maxPPictureL0ReferenceCount_ }, maxBPictureL0ReferenceCount{ maxBPictureL0ReferenceCount_ }, maxL1ReferenceCount{ maxL1ReferenceCount_ }, maxSubLayerCount{ maxSubLayerCount_ }, expectDyadicTemporalSubLayerPattern{ expectDyadicTemporalSubLayerPattern_ }, minQp{ minQp_ }, maxQp{ maxQp_ }, prefersGopRemainingFrames{ prefersGopRemainingFrames_ }, requiresGopRemainingFrames{ requiresGopRemainingFrames_ }, stdSyntaxFlags{ stdSyntaxFlags_ }
{}
VULKAN_HPP_CONSTEXPR VideoEncodeH265CapabilitiesKHR( VideoEncodeH265CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeH265CapabilitiesKHR( VkVideoEncodeH265CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeH265CapabilitiesKHR( *reinterpret_cast<VideoEncodeH265CapabilitiesKHR const *>( &rhs ) )
{}
VideoEncodeH265CapabilitiesKHR & operator=( VideoEncodeH265CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoEncodeH265CapabilitiesKHR & operator=( VkVideoEncodeH265CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilitiesKHR const *>( &rhs );
return *this;
}
operator VkVideoEncodeH265CapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeH265CapabilitiesKHR*>( this );
}
operator VkVideoEncodeH265CapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeH265CapabilitiesKHR*>( this );
}
operator VkVideoEncodeH265CapabilitiesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoEncodeH265CapabilitiesKHR*>( this );
}
operator VkVideoEncodeH265CapabilitiesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoEncodeH265CapabilitiesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilityFlagsKHR const &, StdVideoH265LevelIdc const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::VideoEncodeH265CtbSizeFlagsKHR const &, VULKAN_HPP_NAMESPACE::VideoEncodeH265TransformBlockSizeFlagsKHR const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &, int32_t const &, int32_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::VideoEncodeH265StdFlagsKHR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, maxLevelIdc, maxSliceSegmentCount, maxTiles, ctbSizes, transformBlockSizes, maxPPictureL0ReferenceCount, maxBPictureL0ReferenceCount, maxL1ReferenceCount, maxSubLayerCount, expectDyadicTemporalSubLayerPattern, minQp, maxQp, prefersGopRemainingFrames, requiresGopRemainingFrames, stdSyntaxFlags );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
std::strong_ordering operator<=>( VideoEncodeH265CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) return cmp;
if ( auto cmp = memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH265LevelIdc ) ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
if ( auto cmp = maxSliceSegmentCount <=> rhs.maxSliceSegmentCount; cmp != 0 ) return cmp;
if ( auto cmp = maxTiles <=> rhs.maxTiles; cmp != 0 ) return cmp;
if ( auto cmp = ctbSizes <=> rhs.ctbSizes; cmp != 0 ) return cmp;
if ( auto cmp = transformBlockSizes <=> rhs.transformBlockSizes; cmp != 0 ) return cmp;
if ( auto cmp = maxPPictureL0ReferenceCount <=> rhs.maxPPictureL0ReferenceCount; cmp != 0 ) return cmp;
if ( auto cmp = maxBPictureL0ReferenceCount <=> rhs.maxBPictureL0ReferenceCount; cmp != 0 ) return cmp;
if ( auto cmp = maxL1ReferenceCount <=> rhs.maxL1ReferenceCount; cmp != 0 ) return cmp;
if ( auto cmp = maxSubLayerCount <=> rhs.maxSubLayerCount; cmp != 0 ) return cmp;
if ( auto cmp = expectDyadicTemporalSubLayerPattern <=> rhs.expectDyadicTemporalSubLayerPattern; cmp != 0 ) return cmp;
if ( auto cmp = minQp <=> rhs.minQp; cmp != 0 ) return cmp;
if ( auto cmp = maxQp <=> rhs.maxQp; cmp != 0 ) return cmp;
if ( auto cmp = prefersGopRemainingFrames <=> rhs.prefersGopRemainingFrames; cmp != 0 ) return cmp;
if ( auto cmp = requiresGopRemainingFrames <=> rhs.requiresGopRemainingFrames; cmp != 0 ) return cmp;
if ( auto cmp = stdSyntaxFlags <=> rhs.stdSyntaxFlags; cmp != 0 ) return cmp;
return std::strong_ordering::equivalent;
}
#endif
bool operator==( VideoEncodeH265CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH265LevelIdc ) ) == 0 )
&& ( maxSliceSegmentCount == rhs.maxSliceSegmentCount )
&& ( maxTiles == rhs.maxTiles )
&& ( ctbSizes == rhs.ctbSizes )
&& ( transformBlockSizes == rhs.transformBlockSizes )
&& ( maxPPictureL0ReferenceCount == rhs.maxPPictureL0ReferenceCount )
&& ( maxBPictureL0ReferenceCount == rhs.maxBPictureL0ReferenceCount )
&& ( maxL1ReferenceCount == rhs.maxL1ReferenceCount )
&& ( maxSubLayerCount == rhs.maxSubLayerCount )
&& ( expectDyadicTemporalSubLayerPattern == rhs.expectDyadicTemporalSubLayerPattern )
&& ( minQp == rhs.minQp )
&& ( maxQp == rhs.maxQp )
&& ( prefersGopRemainingFrames == rhs.prefersGopRemainingFrames )
&& ( requiresGopRemainingFrames == rhs.requiresGopRemainingFrames )
&& ( stdSyntaxFlags == rhs.stdSyntaxFlags );
}
bool operator!=( VideoEncodeH265CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265CapabilitiesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilityFlagsKHR flags = {};
StdVideoH265LevelIdc maxLevelIdc = {};
uint32_t maxSliceSegmentCount = {};
VULKAN_HPP_NAMESPACE::Extent2D maxTiles = {};
VULKAN_HPP_NAMESPACE::VideoEncodeH265CtbSizeFlagsKHR ctbSizes = {};
VULKAN_HPP_NAMESPACE::VideoEncodeH265TransformBlockSizeFlagsKHR transformBlockSizes = {};
uint32_t maxPPictureL0ReferenceCount = {};
uint32_t maxBPictureL0ReferenceCount = {};
uint32_t maxL1ReferenceCount = {};
uint32_t maxSubLayerCount = {};
VULKAN_HPP_NAMESPACE::Bool32 expectDyadicTemporalSubLayerPattern = {};
int32_t minQp = {};
int32_t maxQp = {};
VULKAN_HPP_NAMESPACE::Bool32 prefersGopRemainingFrames = {};
VULKAN_HPP_NAMESPACE::Bool32 requiresGopRemainingFrames = {};
VULKAN_HPP_NAMESPACE::VideoEncodeH265StdFlagsKHR stdSyntaxFlags = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoEncodeH265CapabilitiesKHR>
{
using Type = VideoEncodeH265CapabilitiesKHR;
};
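// A minimal query sketch (editorial, hedged): H.265 encode capabilities are returned as a
// link in the VideoCapabilitiesKHR structure chain, assuming videoProfileInfo describes an
// H.265 encode profile.
//   auto chain = physicalDevice.getVideoCapabilitiesKHR<
//                  VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR,
//                  VULKAN_HPP_NAMESPACE::VideoEncodeCapabilitiesKHR,
//                  VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilitiesKHR>( videoProfileInfo );
//   auto const & h265Caps = chain.get<VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilitiesKHR>();
//   // e.g. clamp any requested QP into [h265Caps.minQp, h265Caps.maxQp]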
// wrapper struct for struct VkVideoEncodeH265DpbSlotInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH265DpbSlotInfoKHR.html
struct VideoEncodeH265DpbSlotInfoKHR
{
using NativeType = VkVideoEncodeH265DpbSlotInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265DpbSlotInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoEncodeH265DpbSlotInfoKHR(const StdVideoEncodeH265ReferenceInfo * pStdReferenceInfo_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, pStdReferenceInfo{ pStdReferenceInfo_ }
{}
VULKAN_HPP_CONSTEXPR VideoEncodeH265DpbSlotInfoKHR( VideoEncodeH265DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeH265DpbSlotInfoKHR( VkVideoEncodeH265DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeH265DpbSlotInfoKHR( *reinterpret_cast<VideoEncodeH265DpbSlotInfoKHR const *>( &rhs ) )
{}
VideoEncodeH265DpbSlotInfoKHR & operator=( VideoEncodeH265DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoEncodeH265DpbSlotInfoKHR & operator=( VkVideoEncodeH265DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265DpbSlotInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265DpbSlotInfoKHR & setPStdReferenceInfo( const StdVideoEncodeH265ReferenceInfo * pStdReferenceInfo_ ) VULKAN_HPP_NOEXCEPT
{
pStdReferenceInfo = pStdReferenceInfo_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoEncodeH265DpbSlotInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeH265DpbSlotInfoKHR*>( this );
}
operator VkVideoEncodeH265DpbSlotInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeH265DpbSlotInfoKHR*>( this );
}
operator VkVideoEncodeH265DpbSlotInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoEncodeH265DpbSlotInfoKHR*>( this );
}
operator VkVideoEncodeH265DpbSlotInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoEncodeH265DpbSlotInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const StdVideoEncodeH265ReferenceInfo * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, pStdReferenceInfo );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoEncodeH265DpbSlotInfoKHR const & ) const = default;
#else
bool operator==( VideoEncodeH265DpbSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( pStdReferenceInfo == rhs.pStdReferenceInfo );
#endif
}
bool operator!=( VideoEncodeH265DpbSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265DpbSlotInfoKHR;
const void * pNext = {};
const StdVideoEncodeH265ReferenceInfo * pStdReferenceInfo = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoEncodeH265DpbSlotInfoKHR>
{
using Type = VideoEncodeH265DpbSlotInfoKHR;
};
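// A minimal usage sketch (editorial, hedged): the codec-specific DPB slot info is chained
// behind the VideoReferenceSlotInfoKHR it describes; stdRefInfo is an assumed,
// caller-provided StdVideoEncodeH265ReferenceInfo.
//   VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoKHR h265Slot{ &stdRefInfo };
//   VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR refSlot{};
//   refSlot.pNext = &h265Slot;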
// wrapper struct for struct VkVideoEncodeH265FrameSizeKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH265FrameSizeKHR.html
struct VideoEncodeH265FrameSizeKHR
{
using NativeType = VkVideoEncodeH265FrameSizeKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoEncodeH265FrameSizeKHR(uint32_t frameISize_ = {}, uint32_t framePSize_ = {}, uint32_t frameBSize_ = {}) VULKAN_HPP_NOEXCEPT
: frameISize{ frameISize_ }, framePSize{ framePSize_ }, frameBSize{ frameBSize_ }
{}
VULKAN_HPP_CONSTEXPR VideoEncodeH265FrameSizeKHR( VideoEncodeH265FrameSizeKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeH265FrameSizeKHR( VkVideoEncodeH265FrameSizeKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeH265FrameSizeKHR( *reinterpret_cast<VideoEncodeH265FrameSizeKHR const *>( &rhs ) )
{}
VideoEncodeH265FrameSizeKHR & operator=( VideoEncodeH265FrameSizeKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoEncodeH265FrameSizeKHR & operator=( VkVideoEncodeH265FrameSizeKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265FrameSizeKHR & setFrameISize( uint32_t frameISize_ ) VULKAN_HPP_NOEXCEPT
{
frameISize = frameISize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265FrameSizeKHR & setFramePSize( uint32_t framePSize_ ) VULKAN_HPP_NOEXCEPT
{
framePSize = framePSize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265FrameSizeKHR & setFrameBSize( uint32_t frameBSize_ ) VULKAN_HPP_NOEXCEPT
{
frameBSize = frameBSize_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoEncodeH265FrameSizeKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeH265FrameSizeKHR*>( this );
}
operator VkVideoEncodeH265FrameSizeKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeH265FrameSizeKHR*>( this );
}
operator VkVideoEncodeH265FrameSizeKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoEncodeH265FrameSizeKHR*>( this );
}
operator VkVideoEncodeH265FrameSizeKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoEncodeH265FrameSizeKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<uint32_t const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( frameISize, framePSize, frameBSize );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoEncodeH265FrameSizeKHR const & ) const = default;
#else
bool operator==( VideoEncodeH265FrameSizeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( frameISize == rhs.frameISize )
&& ( framePSize == rhs.framePSize )
&& ( frameBSize == rhs.frameBSize );
#endif
}
bool operator!=( VideoEncodeH265FrameSizeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
uint32_t frameISize = {};
uint32_t framePSize = {};
uint32_t frameBSize = {};
};
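// A minimal usage sketch (editorial, hedged): per-picture-type upper bounds in bytes,
// typically assigned to VideoEncodeH265RateControlLayerInfoKHR::maxFrameSize further below.
//   VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeKHR maxFrameSizes{ /*frameISize_=*/120000,
//                                                                    /*framePSize_=*/60000,
//                                                                    /*frameBSize_=*/30000 };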
// wrapper struct for struct VkVideoEncodeH265GopRemainingFrameInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH265GopRemainingFrameInfoKHR.html
struct VideoEncodeH265GopRemainingFrameInfoKHR
{
using NativeType = VkVideoEncodeH265GopRemainingFrameInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265GopRemainingFrameInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoEncodeH265GopRemainingFrameInfoKHR(VULKAN_HPP_NAMESPACE::Bool32 useGopRemainingFrames_ = {}, uint32_t gopRemainingI_ = {}, uint32_t gopRemainingP_ = {}, uint32_t gopRemainingB_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, useGopRemainingFrames{ useGopRemainingFrames_ }, gopRemainingI{ gopRemainingI_ }, gopRemainingP{ gopRemainingP_ }, gopRemainingB{ gopRemainingB_ }
{}
VULKAN_HPP_CONSTEXPR VideoEncodeH265GopRemainingFrameInfoKHR( VideoEncodeH265GopRemainingFrameInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeH265GopRemainingFrameInfoKHR( VkVideoEncodeH265GopRemainingFrameInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeH265GopRemainingFrameInfoKHR( *reinterpret_cast<VideoEncodeH265GopRemainingFrameInfoKHR const *>( &rhs ) )
{}
VideoEncodeH265GopRemainingFrameInfoKHR & operator=( VideoEncodeH265GopRemainingFrameInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoEncodeH265GopRemainingFrameInfoKHR & operator=( VkVideoEncodeH265GopRemainingFrameInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH265GopRemainingFrameInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265GopRemainingFrameInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265GopRemainingFrameInfoKHR & setUseGopRemainingFrames( VULKAN_HPP_NAMESPACE::Bool32 useGopRemainingFrames_ ) VULKAN_HPP_NOEXCEPT
{
useGopRemainingFrames = useGopRemainingFrames_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265GopRemainingFrameInfoKHR & setGopRemainingI( uint32_t gopRemainingI_ ) VULKAN_HPP_NOEXCEPT
{
gopRemainingI = gopRemainingI_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265GopRemainingFrameInfoKHR & setGopRemainingP( uint32_t gopRemainingP_ ) VULKAN_HPP_NOEXCEPT
{
gopRemainingP = gopRemainingP_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265GopRemainingFrameInfoKHR & setGopRemainingB( uint32_t gopRemainingB_ ) VULKAN_HPP_NOEXCEPT
{
gopRemainingB = gopRemainingB_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoEncodeH265GopRemainingFrameInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeH265GopRemainingFrameInfoKHR*>( this );
}
operator VkVideoEncodeH265GopRemainingFrameInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeH265GopRemainingFrameInfoKHR*>( this );
}
operator VkVideoEncodeH265GopRemainingFrameInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoEncodeH265GopRemainingFrameInfoKHR*>( this );
}
operator VkVideoEncodeH265GopRemainingFrameInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoEncodeH265GopRemainingFrameInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint32_t const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, useGopRemainingFrames, gopRemainingI, gopRemainingP, gopRemainingB );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoEncodeH265GopRemainingFrameInfoKHR const & ) const = default;
#else
bool operator==( VideoEncodeH265GopRemainingFrameInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( useGopRemainingFrames == rhs.useGopRemainingFrames )
&& ( gopRemainingI == rhs.gopRemainingI )
&& ( gopRemainingP == rhs.gopRemainingP )
&& ( gopRemainingB == rhs.gopRemainingB );
#endif
}
bool operator!=( VideoEncodeH265GopRemainingFrameInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265GopRemainingFrameInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 useGopRemainingFrames = {};
uint32_t gopRemainingI = {};
uint32_t gopRemainingP = {};
uint32_t gopRemainingB = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoEncodeH265GopRemainingFrameInfoKHR>
{
using Type = VideoEncodeH265GopRemainingFrameInfoKHR;
};
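// A minimal usage sketch (editorial, hedged): when the capability struct reports
// requiresGopRemainingFrames, the remaining I/P/B picture counts of the current GOP are
// chained into VideoBeginCodingInfoKHR each time coding begins.
//   auto gopInfo = VULKAN_HPP_NAMESPACE::VideoEncodeH265GopRemainingFrameInfoKHR{}
//                    .setUseGopRemainingFrames( VK_TRUE )
//                    .setGopRemainingI( 1 )
//                    .setGopRemainingP( 7 )
//                    .setGopRemainingB( 16 );
//   VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR beginInfo{};
//   beginInfo.pNext = &gopInfo;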
// wrapper struct for struct VkVideoEncodeH265NaluSliceSegmentInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH265NaluSliceSegmentInfoKHR.html
struct VideoEncodeH265NaluSliceSegmentInfoKHR
{
using NativeType = VkVideoEncodeH265NaluSliceSegmentInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265NaluSliceSegmentInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoEncodeH265NaluSliceSegmentInfoKHR(int32_t constantQp_ = {}, const StdVideoEncodeH265SliceSegmentHeader * pStdSliceSegmentHeader_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, constantQp{ constantQp_ }, pStdSliceSegmentHeader{ pStdSliceSegmentHeader_ }
{}
VULKAN_HPP_CONSTEXPR VideoEncodeH265NaluSliceSegmentInfoKHR( VideoEncodeH265NaluSliceSegmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeH265NaluSliceSegmentInfoKHR( VkVideoEncodeH265NaluSliceSegmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeH265NaluSliceSegmentInfoKHR( *reinterpret_cast<VideoEncodeH265NaluSliceSegmentInfoKHR const *>( &rhs ) )
{}
VideoEncodeH265NaluSliceSegmentInfoKHR & operator=( VideoEncodeH265NaluSliceSegmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoEncodeH265NaluSliceSegmentInfoKHR & operator=( VkVideoEncodeH265NaluSliceSegmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265NaluSliceSegmentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265NaluSliceSegmentInfoKHR & setConstantQp( int32_t constantQp_ ) VULKAN_HPP_NOEXCEPT
{
constantQp = constantQp_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265NaluSliceSegmentInfoKHR & setPStdSliceSegmentHeader( const StdVideoEncodeH265SliceSegmentHeader * pStdSliceSegmentHeader_ ) VULKAN_HPP_NOEXCEPT
{
pStdSliceSegmentHeader = pStdSliceSegmentHeader_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoEncodeH265NaluSliceSegmentInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeH265NaluSliceSegmentInfoKHR*>( this );
}
operator VkVideoEncodeH265NaluSliceSegmentInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeH265NaluSliceSegmentInfoKHR*>( this );
}
operator VkVideoEncodeH265NaluSliceSegmentInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoEncodeH265NaluSliceSegmentInfoKHR*>( this );
}
operator VkVideoEncodeH265NaluSliceSegmentInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoEncodeH265NaluSliceSegmentInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, int32_t const &, const StdVideoEncodeH265SliceSegmentHeader * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, constantQp, pStdSliceSegmentHeader );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoEncodeH265NaluSliceSegmentInfoKHR const & ) const = default;
#else
bool operator==( VideoEncodeH265NaluSliceSegmentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( constantQp == rhs.constantQp )
&& ( pStdSliceSegmentHeader == rhs.pStdSliceSegmentHeader );
#endif
}
bool operator!=( VideoEncodeH265NaluSliceSegmentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265NaluSliceSegmentInfoKHR;
const void * pNext = {};
int32_t constantQp = {};
const StdVideoEncodeH265SliceSegmentHeader * pStdSliceSegmentHeader = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoEncodeH265NaluSliceSegmentInfoKHR>
{
using Type = VideoEncodeH265NaluSliceSegmentInfoKHR;
};
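// A minimal usage sketch (editorial, hedged): one entry describes one slice segment NAL
// unit; constantQp is honored only with rate control disabled, and stdSliceSegmentHeader
// is an assumed, caller-filled StdVideoEncodeH265SliceSegmentHeader.
//   VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoKHR sliceSegment{
//     /*constantQp_=*/26, &stdSliceSegmentHeader };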
// wrapper struct for struct VkVideoEncodeH265PictureInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH265PictureInfoKHR.html
struct VideoEncodeH265PictureInfoKHR
{
using NativeType = VkVideoEncodeH265PictureInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265PictureInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoEncodeH265PictureInfoKHR(uint32_t naluSliceSegmentEntryCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoKHR * pNaluSliceSegmentEntries_ = {}, const StdVideoEncodeH265PictureInfo * pStdPictureInfo_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, naluSliceSegmentEntryCount{ naluSliceSegmentEntryCount_ }, pNaluSliceSegmentEntries{ pNaluSliceSegmentEntries_ }, pStdPictureInfo{ pStdPictureInfo_ }
{}
VULKAN_HPP_CONSTEXPR VideoEncodeH265PictureInfoKHR( VideoEncodeH265PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeH265PictureInfoKHR( VkVideoEncodeH265PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeH265PictureInfoKHR( *reinterpret_cast<VideoEncodeH265PictureInfoKHR const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
VideoEncodeH265PictureInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoKHR> const & naluSliceSegmentEntries_, const StdVideoEncodeH265PictureInfo * pStdPictureInfo_ = {}, const void * pNext_ = nullptr )
: pNext( pNext_ ), naluSliceSegmentEntryCount( static_cast<uint32_t>( naluSliceSegmentEntries_.size() ) ), pNaluSliceSegmentEntries( naluSliceSegmentEntries_.data() ), pStdPictureInfo( pStdPictureInfo_ )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VideoEncodeH265PictureInfoKHR & operator=( VideoEncodeH265PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoEncodeH265PictureInfoKHR & operator=( VkVideoEncodeH265PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH265PictureInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265PictureInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265PictureInfoKHR & setNaluSliceSegmentEntryCount( uint32_t naluSliceSegmentEntryCount_ ) VULKAN_HPP_NOEXCEPT
{
naluSliceSegmentEntryCount = naluSliceSegmentEntryCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265PictureInfoKHR & setPNaluSliceSegmentEntries( const VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoKHR * pNaluSliceSegmentEntries_ ) VULKAN_HPP_NOEXCEPT
{
pNaluSliceSegmentEntries = pNaluSliceSegmentEntries_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
VideoEncodeH265PictureInfoKHR & setNaluSliceSegmentEntries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoKHR> const & naluSliceSegmentEntries_ ) VULKAN_HPP_NOEXCEPT
{
naluSliceSegmentEntryCount = static_cast<uint32_t>( naluSliceSegmentEntries_.size() );
pNaluSliceSegmentEntries = naluSliceSegmentEntries_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265PictureInfoKHR & setPStdPictureInfo( const StdVideoEncodeH265PictureInfo * pStdPictureInfo_ ) VULKAN_HPP_NOEXCEPT
{
pStdPictureInfo = pStdPictureInfo_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoEncodeH265PictureInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeH265PictureInfoKHR*>( this );
}
operator VkVideoEncodeH265PictureInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeH265PictureInfoKHR*>( this );
}
operator VkVideoEncodeH265PictureInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoEncodeH265PictureInfoKHR*>( this );
}
operator VkVideoEncodeH265PictureInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoEncodeH265PictureInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoKHR * const &, const StdVideoEncodeH265PictureInfo * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, naluSliceSegmentEntryCount, pNaluSliceSegmentEntries, pStdPictureInfo );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoEncodeH265PictureInfoKHR const & ) const = default;
#else
bool operator==( VideoEncodeH265PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( naluSliceSegmentEntryCount == rhs.naluSliceSegmentEntryCount )
&& ( pNaluSliceSegmentEntries == rhs.pNaluSliceSegmentEntries )
&& ( pStdPictureInfo == rhs.pStdPictureInfo );
#endif
}
bool operator!=( VideoEncodeH265PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265PictureInfoKHR;
const void * pNext = {};
uint32_t naluSliceSegmentEntryCount = {};
const VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoKHR * pNaluSliceSegmentEntries = {};
const StdVideoEncodeH265PictureInfo * pStdPictureInfo = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoEncodeH265PictureInfoKHR>
{
using Type = VideoEncodeH265PictureInfoKHR;
};
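// A minimal usage sketch (editorial, hedged): the ArrayProxy setter fills both the count
// and the pointer in one call; sliceSegment and stdPictureInfo are assumed caller-side data.
//   std::array<VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoKHR, 1> sliceSegments = { sliceSegment };
//   auto pictureInfo = VULKAN_HPP_NAMESPACE::VideoEncodeH265PictureInfoKHR{}
//                        .setNaluSliceSegmentEntries( sliceSegments )
//                        .setPStdPictureInfo( &stdPictureInfo );
//   VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR encodeInfo{};
//   encodeInfo.pNext = &pictureInfo;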
// wrapper struct for struct VkVideoEncodeH265ProfileInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH265ProfileInfoKHR.html
struct VideoEncodeH265ProfileInfoKHR
{
using NativeType = VkVideoEncodeH265ProfileInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265ProfileInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoEncodeH265ProfileInfoKHR(StdVideoH265ProfileIdc stdProfileIdc_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, stdProfileIdc{ stdProfileIdc_ }
{}
VULKAN_HPP_CONSTEXPR VideoEncodeH265ProfileInfoKHR( VideoEncodeH265ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeH265ProfileInfoKHR( VkVideoEncodeH265ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeH265ProfileInfoKHR( *reinterpret_cast<VideoEncodeH265ProfileInfoKHR const *>( &rhs ) )
{}
VideoEncodeH265ProfileInfoKHR & operator=( VideoEncodeH265ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoEncodeH265ProfileInfoKHR & operator=( VkVideoEncodeH265ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH265ProfileInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ProfileInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ProfileInfoKHR & setStdProfileIdc( StdVideoH265ProfileIdc stdProfileIdc_ ) VULKAN_HPP_NOEXCEPT
{
stdProfileIdc = stdProfileIdc_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoEncodeH265ProfileInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeH265ProfileInfoKHR*>( this );
}
operator VkVideoEncodeH265ProfileInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeH265ProfileInfoKHR*>( this );
}
operator VkVideoEncodeH265ProfileInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoEncodeH265ProfileInfoKHR*>( this );
}
operator VkVideoEncodeH265ProfileInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoEncodeH265ProfileInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, StdVideoH265ProfileIdc const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, stdProfileIdc );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
std::strong_ordering operator<=>( VideoEncodeH265ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
if ( auto cmp = memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH265ProfileIdc ) ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
return std::strong_ordering::equivalent;
}
#endif
bool operator==( VideoEncodeH265ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH265ProfileIdc ) ) == 0 );
}
bool operator!=( VideoEncodeH265ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265ProfileInfoKHR;
const void * pNext = {};
StdVideoH265ProfileIdc stdProfileIdc = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoEncodeH265ProfileInfoKHR>
{
using Type = VideoEncodeH265ProfileInfoKHR;
};
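// A minimal usage sketch (editorial, hedged): the H.265 profile IDC is chained behind the
// generic VideoProfileInfoKHR used for capability queries and video session creation.
//   VULKAN_HPP_NAMESPACE::VideoEncodeH265ProfileInfoKHR h265Profile{ STD_VIDEO_H265_PROFILE_IDC_MAIN };
//   VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR videoProfileInfo{};
//   videoProfileInfo.videoCodecOperation = VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR::eEncodeH265;
//   videoProfileInfo.pNext = &h265Profile;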
// wrapper struct for struct VkVideoEncodeH265QpKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH265QpKHR.html
struct VideoEncodeH265QpKHR
{
using NativeType = VkVideoEncodeH265QpKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoEncodeH265QpKHR(int32_t qpI_ = {}, int32_t qpP_ = {}, int32_t qpB_ = {}) VULKAN_HPP_NOEXCEPT
: qpI{ qpI_ }, qpP{ qpP_ }, qpB{ qpB_ }
{}
VULKAN_HPP_CONSTEXPR VideoEncodeH265QpKHR( VideoEncodeH265QpKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeH265QpKHR( VkVideoEncodeH265QpKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeH265QpKHR( *reinterpret_cast<VideoEncodeH265QpKHR const *>( &rhs ) )
{}
VideoEncodeH265QpKHR & operator=( VideoEncodeH265QpKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoEncodeH265QpKHR & operator=( VkVideoEncodeH265QpKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH265QpKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265QpKHR & setQpI( int32_t qpI_ ) VULKAN_HPP_NOEXCEPT
{
qpI = qpI_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265QpKHR & setQpP( int32_t qpP_ ) VULKAN_HPP_NOEXCEPT
{
qpP = qpP_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265QpKHR & setQpB( int32_t qpB_ ) VULKAN_HPP_NOEXCEPT
{
qpB = qpB_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoEncodeH265QpKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeH265QpKHR*>( this );
}
operator VkVideoEncodeH265QpKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeH265QpKHR*>( this );
}
operator VkVideoEncodeH265QpKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoEncodeH265QpKHR*>( this );
}
operator VkVideoEncodeH265QpKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoEncodeH265QpKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<int32_t const &, int32_t const &, int32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( qpI, qpP, qpB );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoEncodeH265QpKHR const & ) const = default;
#else
bool operator==( VideoEncodeH265QpKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( qpI == rhs.qpI )
&& ( qpP == rhs.qpP )
&& ( qpB == rhs.qpB );
#endif
}
bool operator!=( VideoEncodeH265QpKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
int32_t qpI = {};
int32_t qpP = {};
int32_t qpB = {};
};
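// A minimal usage sketch (editorial, hedged): per-picture-type QP triples, e.g. as the
// min/max clamps of the rate-control layer info defined below.
//   VULKAN_HPP_NAMESPACE::VideoEncodeH265QpKHR minQp{ /*qpI_=*/18, /*qpP_=*/20, /*qpB_=*/22 };
//   VULKAN_HPP_NAMESPACE::VideoEncodeH265QpKHR maxQp{ /*qpI_=*/38, /*qpP_=*/40, /*qpB_=*/42 };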
// wrapper struct for struct VkVideoEncodeH265QualityLevelPropertiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH265QualityLevelPropertiesKHR.html
struct VideoEncodeH265QualityLevelPropertiesKHR
{
using NativeType = VkVideoEncodeH265QualityLevelPropertiesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265QualityLevelPropertiesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoEncodeH265QualityLevelPropertiesKHR(VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlFlagsKHR preferredRateControlFlags_ = {}, uint32_t preferredGopFrameCount_ = {}, uint32_t preferredIdrPeriod_ = {}, uint32_t preferredConsecutiveBFrameCount_ = {}, uint32_t preferredSubLayerCount_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH265QpKHR preferredConstantQp_ = {}, uint32_t preferredMaxL0ReferenceCount_ = {}, uint32_t preferredMaxL1ReferenceCount_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, preferredRateControlFlags{ preferredRateControlFlags_ }, preferredGopFrameCount{ preferredGopFrameCount_ }, preferredIdrPeriod{ preferredIdrPeriod_ }, preferredConsecutiveBFrameCount{ preferredConsecutiveBFrameCount_ }, preferredSubLayerCount{ preferredSubLayerCount_ }, preferredConstantQp{ preferredConstantQp_ }, preferredMaxL0ReferenceCount{ preferredMaxL0ReferenceCount_ }, preferredMaxL1ReferenceCount{ preferredMaxL1ReferenceCount_ }
{}
VULKAN_HPP_CONSTEXPR VideoEncodeH265QualityLevelPropertiesKHR( VideoEncodeH265QualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeH265QualityLevelPropertiesKHR( VkVideoEncodeH265QualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeH265QualityLevelPropertiesKHR( *reinterpret_cast<VideoEncodeH265QualityLevelPropertiesKHR const *>( &rhs ) )
{}
VideoEncodeH265QualityLevelPropertiesKHR & operator=( VideoEncodeH265QualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoEncodeH265QualityLevelPropertiesKHR & operator=( VkVideoEncodeH265QualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH265QualityLevelPropertiesKHR const *>( &rhs );
return *this;
}
operator VkVideoEncodeH265QualityLevelPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeH265QualityLevelPropertiesKHR*>( this );
}
operator VkVideoEncodeH265QualityLevelPropertiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeH265QualityLevelPropertiesKHR*>( this );
}
operator VkVideoEncodeH265QualityLevelPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoEncodeH265QualityLevelPropertiesKHR*>( this );
}
operator VkVideoEncodeH265QualityLevelPropertiesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoEncodeH265QualityLevelPropertiesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlFlagsKHR const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::VideoEncodeH265QpKHR const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, preferredRateControlFlags, preferredGopFrameCount, preferredIdrPeriod, preferredConsecutiveBFrameCount, preferredSubLayerCount, preferredConstantQp, preferredMaxL0ReferenceCount, preferredMaxL1ReferenceCount );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoEncodeH265QualityLevelPropertiesKHR const & ) const = default;
#else
bool operator==( VideoEncodeH265QualityLevelPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( preferredRateControlFlags == rhs.preferredRateControlFlags )
&& ( preferredGopFrameCount == rhs.preferredGopFrameCount )
&& ( preferredIdrPeriod == rhs.preferredIdrPeriod )
&& ( preferredConsecutiveBFrameCount == rhs.preferredConsecutiveBFrameCount )
&& ( preferredSubLayerCount == rhs.preferredSubLayerCount )
&& ( preferredConstantQp == rhs.preferredConstantQp )
&& ( preferredMaxL0ReferenceCount == rhs.preferredMaxL0ReferenceCount )
&& ( preferredMaxL1ReferenceCount == rhs.preferredMaxL1ReferenceCount );
#endif
}
bool operator!=( VideoEncodeH265QualityLevelPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265QualityLevelPropertiesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlFlagsKHR preferredRateControlFlags = {};
uint32_t preferredGopFrameCount = {};
uint32_t preferredIdrPeriod = {};
uint32_t preferredConsecutiveBFrameCount = {};
uint32_t preferredSubLayerCount = {};
VULKAN_HPP_NAMESPACE::VideoEncodeH265QpKHR preferredConstantQp = {};
uint32_t preferredMaxL0ReferenceCount = {};
uint32_t preferredMaxL1ReferenceCount = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoEncodeH265QualityLevelPropertiesKHR>
{
using Type = VideoEncodeH265QualityLevelPropertiesKHR;
};
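// A minimal query sketch (editorial, hedged): per-quality-level preferences come back as a
// link in the VideoEncodeQualityLevelPropertiesKHR chain, assuming qualityLevelInfo is a
// PhysicalDeviceVideoEncodeQualityLevelInfoKHR naming the H.265 encode profile and level.
//   auto chain = physicalDevice.getVideoEncodeQualityLevelPropertiesKHR<
//                  VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR,
//                  VULKAN_HPP_NAMESPACE::VideoEncodeH265QualityLevelPropertiesKHR>( qualityLevelInfo );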
// wrapper struct for struct VkVideoEncodeH265QuantizationMapCapabilitiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH265QuantizationMapCapabilitiesKHR.html
struct VideoEncodeH265QuantizationMapCapabilitiesKHR
{
using NativeType = VkVideoEncodeH265QuantizationMapCapabilitiesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265QuantizationMapCapabilitiesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoEncodeH265QuantizationMapCapabilitiesKHR(int32_t minQpDelta_ = {}, int32_t maxQpDelta_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, minQpDelta{ minQpDelta_ }, maxQpDelta{ maxQpDelta_ }
{}
VULKAN_HPP_CONSTEXPR VideoEncodeH265QuantizationMapCapabilitiesKHR( VideoEncodeH265QuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeH265QuantizationMapCapabilitiesKHR( VkVideoEncodeH265QuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeH265QuantizationMapCapabilitiesKHR( *reinterpret_cast<VideoEncodeH265QuantizationMapCapabilitiesKHR const *>( &rhs ) )
{}
VideoEncodeH265QuantizationMapCapabilitiesKHR & operator=( VideoEncodeH265QuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoEncodeH265QuantizationMapCapabilitiesKHR & operator=( VkVideoEncodeH265QuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH265QuantizationMapCapabilitiesKHR const *>( &rhs );
return *this;
}
operator VkVideoEncodeH265QuantizationMapCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeH265QuantizationMapCapabilitiesKHR*>( this );
}
operator VkVideoEncodeH265QuantizationMapCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeH265QuantizationMapCapabilitiesKHR*>( this );
}
operator VkVideoEncodeH265QuantizationMapCapabilitiesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoEncodeH265QuantizationMapCapabilitiesKHR*>( this );
}
operator VkVideoEncodeH265QuantizationMapCapabilitiesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoEncodeH265QuantizationMapCapabilitiesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, int32_t const &, int32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, minQpDelta, maxQpDelta );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoEncodeH265QuantizationMapCapabilitiesKHR const & ) const = default;
#else
bool operator==( VideoEncodeH265QuantizationMapCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( minQpDelta == rhs.minQpDelta )
&& ( maxQpDelta == rhs.maxQpDelta );
#endif
}
bool operator!=( VideoEncodeH265QuantizationMapCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265QuantizationMapCapabilitiesKHR;
void * pNext = {};
int32_t minQpDelta = {};
int32_t maxQpDelta = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoEncodeH265QuantizationMapCapabilitiesKHR>
{
using Type = VideoEncodeH265QuantizationMapCapabilitiesKHR;
};
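// A minimal query sketch (editorial, hedged): quantization-map limits arrive as another
// link in the same VideoCapabilitiesKHR chain used for VideoEncodeH265CapabilitiesKHR
// above (VK_KHR_video_encode_quantization_map); minQpDelta/maxQpDelta then bound the
// per-block deltas written into a quantization map.
//   auto chain = physicalDevice.getVideoCapabilitiesKHR<
//                  VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR,
//                  VULKAN_HPP_NAMESPACE::VideoEncodeCapabilitiesKHR,
//                  VULKAN_HPP_NAMESPACE::VideoEncodeH265QuantizationMapCapabilitiesKHR>( videoProfileInfo );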
// wrapper struct for struct VkVideoEncodeH265RateControlInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH265RateControlInfoKHR.html
struct VideoEncodeH265RateControlInfoKHR
{
using NativeType = VkVideoEncodeH265RateControlInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265RateControlInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoEncodeH265RateControlInfoKHR(VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlFlagsKHR flags_ = {}, uint32_t gopFrameCount_ = {}, uint32_t idrPeriod_ = {}, uint32_t consecutiveBFrameCount_ = {}, uint32_t subLayerCount_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, gopFrameCount{ gopFrameCount_ }, idrPeriod{ idrPeriod_ }, consecutiveBFrameCount{ consecutiveBFrameCount_ }, subLayerCount{ subLayerCount_ }
{}
VULKAN_HPP_CONSTEXPR VideoEncodeH265RateControlInfoKHR( VideoEncodeH265RateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeH265RateControlInfoKHR( VkVideoEncodeH265RateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeH265RateControlInfoKHR( *reinterpret_cast<VideoEncodeH265RateControlInfoKHR const *>( &rhs ) )
{}
VideoEncodeH265RateControlInfoKHR & operator=( VideoEncodeH265RateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoEncodeH265RateControlInfoKHR & operator=( VkVideoEncodeH265RateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoKHR & setGopFrameCount( uint32_t gopFrameCount_ ) VULKAN_HPP_NOEXCEPT
{
gopFrameCount = gopFrameCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoKHR & setIdrPeriod( uint32_t idrPeriod_ ) VULKAN_HPP_NOEXCEPT
{
idrPeriod = idrPeriod_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoKHR & setConsecutiveBFrameCount( uint32_t consecutiveBFrameCount_ ) VULKAN_HPP_NOEXCEPT
{
consecutiveBFrameCount = consecutiveBFrameCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoKHR & setSubLayerCount( uint32_t subLayerCount_ ) VULKAN_HPP_NOEXCEPT
{
subLayerCount = subLayerCount_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoEncodeH265RateControlInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeH265RateControlInfoKHR*>( this );
}
operator VkVideoEncodeH265RateControlInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeH265RateControlInfoKHR*>( this );
}
operator VkVideoEncodeH265RateControlInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoEncodeH265RateControlInfoKHR*>( this );
}
operator VkVideoEncodeH265RateControlInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoEncodeH265RateControlInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlFlagsKHR const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, gopFrameCount, idrPeriod, consecutiveBFrameCount, subLayerCount );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoEncodeH265RateControlInfoKHR const & ) const = default;
#else
bool operator==( VideoEncodeH265RateControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( gopFrameCount == rhs.gopFrameCount )
&& ( idrPeriod == rhs.idrPeriod )
&& ( consecutiveBFrameCount == rhs.consecutiveBFrameCount )
&& ( subLayerCount == rhs.subLayerCount );
#endif
}
bool operator!=( VideoEncodeH265RateControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265RateControlInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlFlagsKHR flags = {};
uint32_t gopFrameCount = {};
uint32_t idrPeriod = {};
uint32_t consecutiveBFrameCount = {};
uint32_t subLayerCount = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoEncodeH265RateControlInfoKHR>
{
using Type = VideoEncodeH265RateControlInfoKHR;
};
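// A minimal usage sketch (editorial, hedged): the H.265 GOP shape is linked into the
// VideoCodingControlInfoKHR pNext chain alongside the codec-independent rate-control info.
//   auto h265RateControl = VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlInfoKHR{}
//                            .setGopFrameCount( 16 )
//                            .setIdrPeriod( 16 )
//                            .setConsecutiveBFrameCount( 2 )
//                            .setSubLayerCount( 1 );
//   VULKAN_HPP_NAMESPACE::VideoEncodeRateControlInfoKHR rateControl{};
//   rateControl.pNext = &h265RateControl;
//   VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR codingControl{};
//   codingControl.pNext = &rateControl;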
// wrapper struct for struct VkVideoEncodeH265RateControlLayerInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH265RateControlLayerInfoKHR.html
struct VideoEncodeH265RateControlLayerInfoKHR
{
using NativeType = VkVideoEncodeH265RateControlLayerInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265RateControlLayerInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoEncodeH265RateControlLayerInfoKHR(VULKAN_HPP_NAMESPACE::Bool32 useMinQp_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH265QpKHR minQp_ = {}, VULKAN_HPP_NAMESPACE::Bool32 useMaxQp_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH265QpKHR maxQp_ = {}, VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeKHR maxFrameSize_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, useMinQp{ useMinQp_ }, minQp{ minQp_ }, useMaxQp{ useMaxQp_ }, maxQp{ maxQp_ }, useMaxFrameSize{ useMaxFrameSize_ }, maxFrameSize{ maxFrameSize_ }
{}
VULKAN_HPP_CONSTEXPR VideoEncodeH265RateControlLayerInfoKHR( VideoEncodeH265RateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeH265RateControlLayerInfoKHR( VkVideoEncodeH265RateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeH265RateControlLayerInfoKHR( *reinterpret_cast<VideoEncodeH265RateControlLayerInfoKHR const *>( &rhs ) )
{}
VideoEncodeH265RateControlLayerInfoKHR & operator=( VideoEncodeH265RateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoEncodeH265RateControlLayerInfoKHR & operator=( VkVideoEncodeH265RateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlLayerInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoKHR & setUseMinQp( VULKAN_HPP_NAMESPACE::Bool32 useMinQp_ ) VULKAN_HPP_NOEXCEPT
{
useMinQp = useMinQp_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoKHR & setMinQp( VULKAN_HPP_NAMESPACE::VideoEncodeH265QpKHR const & minQp_ ) VULKAN_HPP_NOEXCEPT
{
minQp = minQp_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoKHR & setUseMaxQp( VULKAN_HPP_NAMESPACE::Bool32 useMaxQp_ ) VULKAN_HPP_NOEXCEPT
{
useMaxQp = useMaxQp_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoKHR & setMaxQp( VULKAN_HPP_NAMESPACE::VideoEncodeH265QpKHR const & maxQp_ ) VULKAN_HPP_NOEXCEPT
{
maxQp = maxQp_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoKHR & setUseMaxFrameSize( VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize_ ) VULKAN_HPP_NOEXCEPT
{
useMaxFrameSize = useMaxFrameSize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoKHR & setMaxFrameSize( VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeKHR const & maxFrameSize_ ) VULKAN_HPP_NOEXCEPT
{
maxFrameSize = maxFrameSize_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoEncodeH265RateControlLayerInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeH265RateControlLayerInfoKHR*>( this );
}
operator VkVideoEncodeH265RateControlLayerInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeH265RateControlLayerInfoKHR*>( this );
}
operator VkVideoEncodeH265RateControlLayerInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoEncodeH265RateControlLayerInfoKHR*>( this );
}
operator VkVideoEncodeH265RateControlLayerInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoEncodeH265RateControlLayerInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::VideoEncodeH265QpKHR const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::VideoEncodeH265QpKHR const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeKHR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, useMinQp, minQp, useMaxQp, maxQp, useMaxFrameSize, maxFrameSize );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoEncodeH265RateControlLayerInfoKHR const & ) const = default;
#else
bool operator==( VideoEncodeH265RateControlLayerInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( useMinQp == rhs.useMinQp )
&& ( minQp == rhs.minQp )
&& ( useMaxQp == rhs.useMaxQp )
&& ( maxQp == rhs.maxQp )
&& ( useMaxFrameSize == rhs.useMaxFrameSize )
&& ( maxFrameSize == rhs.maxFrameSize );
#endif
}
bool operator!=( VideoEncodeH265RateControlLayerInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265RateControlLayerInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 useMinQp = {};
VULKAN_HPP_NAMESPACE::VideoEncodeH265QpKHR minQp = {};
VULKAN_HPP_NAMESPACE::Bool32 useMaxQp = {};
VULKAN_HPP_NAMESPACE::VideoEncodeH265QpKHR maxQp = {};
VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize = {};
VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeKHR maxFrameSize = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoEncodeH265RateControlLayerInfoKHR>
{
using Type = VideoEncodeH265RateControlLayerInfoKHR;
};
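// Usage sketch (illustrative): clamp per-layer QP by enabling the min/max pairs; the QP triples are
// { qpI, qpP, qpB } and the values below are arbitrary examples.
//   vk::VideoEncodeH265RateControlLayerInfoKHR layer{};
//   layer.setUseMinQp( VK_TRUE ).setMinQp( { 18, 20, 22 } )
//        .setUseMaxQp( VK_TRUE ).setMaxQp( { 34, 36, 38 } );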
// wrapper struct for struct VkVideoEncodeH265SessionCreateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH265SessionCreateInfoKHR.html
struct VideoEncodeH265SessionCreateInfoKHR
{
using NativeType = VkVideoEncodeH265SessionCreateInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265SessionCreateInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionCreateInfoKHR(VULKAN_HPP_NAMESPACE::Bool32 useMaxLevelIdc_ = {}, StdVideoH265LevelIdc maxLevelIdc_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, useMaxLevelIdc{ useMaxLevelIdc_ }, maxLevelIdc{ maxLevelIdc_ }
{}
VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionCreateInfoKHR( VideoEncodeH265SessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeH265SessionCreateInfoKHR( VkVideoEncodeH265SessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeH265SessionCreateInfoKHR( *reinterpret_cast<VideoEncodeH265SessionCreateInfoKHR const *>( &rhs ) )
{}
VideoEncodeH265SessionCreateInfoKHR & operator=( VideoEncodeH265SessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoEncodeH265SessionCreateInfoKHR & operator=( VkVideoEncodeH265SessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionCreateInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionCreateInfoKHR & setUseMaxLevelIdc( VULKAN_HPP_NAMESPACE::Bool32 useMaxLevelIdc_ ) VULKAN_HPP_NOEXCEPT
{
useMaxLevelIdc = useMaxLevelIdc_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionCreateInfoKHR & setMaxLevelIdc( StdVideoH265LevelIdc maxLevelIdc_ ) VULKAN_HPP_NOEXCEPT
{
maxLevelIdc = maxLevelIdc_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoEncodeH265SessionCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeH265SessionCreateInfoKHR*>( this );
}
operator VkVideoEncodeH265SessionCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeH265SessionCreateInfoKHR*>( this );
}
operator VkVideoEncodeH265SessionCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoEncodeH265SessionCreateInfoKHR*>( this );
}
operator VkVideoEncodeH265SessionCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoEncodeH265SessionCreateInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, StdVideoH265LevelIdc const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, useMaxLevelIdc, maxLevelIdc );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
std::strong_ordering operator<=>( VideoEncodeH265SessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
if ( auto cmp = useMaxLevelIdc <=> rhs.useMaxLevelIdc; cmp != 0 ) return cmp;
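// StdVideoH265LevelIdc comes from the external vulkan_video_codec_h265std.h header and is treated as
// opaque here, so ordering falls back to a bytewise memcmp whose sign is mapped onto std::strong_ordering: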
if ( auto cmp = memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH265LevelIdc ) ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
return std::strong_ordering::equivalent;
}
#endif
bool operator==( VideoEncodeH265SessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( useMaxLevelIdc == rhs.useMaxLevelIdc )
&& ( memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH265LevelIdc ) ) == 0 );
}
bool operator!=( VideoEncodeH265SessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265SessionCreateInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 useMaxLevelIdc = {};
StdVideoH265LevelIdc maxLevelIdc = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoEncodeH265SessionCreateInfoKHR>
{
using Type = VideoEncodeH265SessionCreateInfoKHR;
};
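// Usage sketch (illustrative): pin the session to an H.265 level; STD_VIDEO_H265_LEVEL_IDC_5_1 is one of
// the level constants from vulkan_video_codec_h265std.h.
//   vk::VideoEncodeH265SessionCreateInfoKHR sessionCreate{};
//   sessionCreate.setUseMaxLevelIdc( VK_TRUE ).setMaxLevelIdc( STD_VIDEO_H265_LEVEL_IDC_5_1 );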
// wrapper struct for struct VkVideoEncodeH265SessionParametersAddInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH265SessionParametersAddInfoKHR.html
struct VideoEncodeH265SessionParametersAddInfoKHR
{
using NativeType = VkVideoEncodeH265SessionParametersAddInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265SessionParametersAddInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionParametersAddInfoKHR(uint32_t stdVPSCount_ = {}, const StdVideoH265VideoParameterSet * pStdVPSs_ = {}, uint32_t stdSPSCount_ = {}, const StdVideoH265SequenceParameterSet * pStdSPSs_ = {}, uint32_t stdPPSCount_ = {}, const StdVideoH265PictureParameterSet * pStdPPSs_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, stdVPSCount{ stdVPSCount_ }, pStdVPSs{ pStdVPSs_ }, stdSPSCount{ stdSPSCount_ }, pStdSPSs{ pStdSPSs_ }, stdPPSCount{ stdPPSCount_ }, pStdPPSs{ pStdPPSs_ }
{}
VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionParametersAddInfoKHR( VideoEncodeH265SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeH265SessionParametersAddInfoKHR( VkVideoEncodeH265SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeH265SessionParametersAddInfoKHR( *reinterpret_cast<VideoEncodeH265SessionParametersAddInfoKHR const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
VideoEncodeH265SessionParametersAddInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265VideoParameterSet> const & stdVPSs_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265SequenceParameterSet> const & stdSPSs_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265PictureParameterSet> const & stdPPSs_ = {}, const void * pNext_ = nullptr )
: pNext( pNext_ ), stdVPSCount( static_cast<uint32_t>( stdVPSs_.size() ) ), pStdVPSs( stdVPSs_.data() ), stdSPSCount( static_cast<uint32_t>( stdSPSs_.size() ) ), pStdSPSs( stdSPSs_.data() ), stdPPSCount( static_cast<uint32_t>( stdPPSs_.size() ) ), pStdPPSs( stdPPSs_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VideoEncodeH265SessionParametersAddInfoKHR & operator=( VideoEncodeH265SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoEncodeH265SessionParametersAddInfoKHR & operator=( VkVideoEncodeH265SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoKHR & setStdVPSCount( uint32_t stdVPSCount_ ) VULKAN_HPP_NOEXCEPT
{
stdVPSCount = stdVPSCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoKHR & setPStdVPSs( const StdVideoH265VideoParameterSet * pStdVPSs_ ) VULKAN_HPP_NOEXCEPT
{
pStdVPSs = pStdVPSs_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
VideoEncodeH265SessionParametersAddInfoKHR & setStdVPSs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265VideoParameterSet> const & stdVPSs_ ) VULKAN_HPP_NOEXCEPT
{
stdVPSCount = static_cast<uint32_t>( stdVPSs_.size() );
pStdVPSs = stdVPSs_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoKHR & setStdSPSCount( uint32_t stdSPSCount_ ) VULKAN_HPP_NOEXCEPT
{
stdSPSCount = stdSPSCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoKHR & setPStdSPSs( const StdVideoH265SequenceParameterSet * pStdSPSs_ ) VULKAN_HPP_NOEXCEPT
{
pStdSPSs = pStdSPSs_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
VideoEncodeH265SessionParametersAddInfoKHR & setStdSPSs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265SequenceParameterSet> const & stdSPSs_ ) VULKAN_HPP_NOEXCEPT
{
stdSPSCount = static_cast<uint32_t>( stdSPSs_.size() );
pStdSPSs = stdSPSs_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoKHR & setStdPPSCount( uint32_t stdPPSCount_ ) VULKAN_HPP_NOEXCEPT
{
stdPPSCount = stdPPSCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoKHR & setPStdPPSs( const StdVideoH265PictureParameterSet * pStdPPSs_ ) VULKAN_HPP_NOEXCEPT
{
pStdPPSs = pStdPPSs_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
VideoEncodeH265SessionParametersAddInfoKHR & setStdPPSs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265PictureParameterSet> const & stdPPSs_ ) VULKAN_HPP_NOEXCEPT
{
stdPPSCount = static_cast<uint32_t>( stdPPSs_.size() );
pStdPPSs = stdPPSs_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoEncodeH265SessionParametersAddInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeH265SessionParametersAddInfoKHR*>( this );
}
operator VkVideoEncodeH265SessionParametersAddInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeH265SessionParametersAddInfoKHR*>( this );
}
operator VkVideoEncodeH265SessionParametersAddInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoEncodeH265SessionParametersAddInfoKHR*>( this );
}
operator VkVideoEncodeH265SessionParametersAddInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoEncodeH265SessionParametersAddInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const StdVideoH265VideoParameterSet * const &, uint32_t const &, const StdVideoH265SequenceParameterSet * const &, uint32_t const &, const StdVideoH265PictureParameterSet * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, stdVPSCount, pStdVPSs, stdSPSCount, pStdSPSs, stdPPSCount, pStdPPSs );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoEncodeH265SessionParametersAddInfoKHR const & ) const = default;
#else
bool operator==( VideoEncodeH265SessionParametersAddInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( stdVPSCount == rhs.stdVPSCount )
&& ( pStdVPSs == rhs.pStdVPSs )
&& ( stdSPSCount == rhs.stdSPSCount )
&& ( pStdSPSs == rhs.pStdSPSs )
&& ( stdPPSCount == rhs.stdPPSCount )
&& ( pStdPPSs == rhs.pStdPPSs );
#endif
}
bool operator!=( VideoEncodeH265SessionParametersAddInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265SessionParametersAddInfoKHR;
const void * pNext = {};
uint32_t stdVPSCount = {};
const StdVideoH265VideoParameterSet * pStdVPSs = {};
uint32_t stdSPSCount = {};
const StdVideoH265SequenceParameterSet * pStdSPSs = {};
uint32_t stdPPSCount = {};
const StdVideoH265PictureParameterSet * pStdPPSs = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoEncodeH265SessionParametersAddInfoKHR>
{
using Type = VideoEncodeH265SessionParametersAddInfoKHR;
};
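// Usage sketch (illustrative): in enhanced mode the ArrayProxy constructor above derives the three
// count/pointer pairs from the passed ranges; vps/sps/pps stand for Std parameter sets the application
// filled in elsewhere.
//   StdVideoH265VideoParameterSet    vps{};  // filled in elsewhere
//   StdVideoH265SequenceParameterSet sps{};  // filled in elsewhere
//   StdVideoH265PictureParameterSet  pps{};  // filled in elsewhere
//   vk::VideoEncodeH265SessionParametersAddInfoKHR addInfo( vps, sps, pps );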
// wrapper struct for struct VkVideoEncodeH265SessionParametersCreateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH265SessionParametersCreateInfoKHR.html
struct VideoEncodeH265SessionParametersCreateInfoKHR
{
using NativeType = VkVideoEncodeH265SessionParametersCreateInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265SessionParametersCreateInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionParametersCreateInfoKHR(uint32_t maxStdVPSCount_ = {}, uint32_t maxStdSPSCount_ = {}, uint32_t maxStdPPSCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoKHR * pParametersAddInfo_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, maxStdVPSCount{ maxStdVPSCount_ }, maxStdSPSCount{ maxStdSPSCount_ }, maxStdPPSCount{ maxStdPPSCount_ }, pParametersAddInfo{ pParametersAddInfo_ }
{}
VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionParametersCreateInfoKHR( VideoEncodeH265SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeH265SessionParametersCreateInfoKHR( VkVideoEncodeH265SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeH265SessionParametersCreateInfoKHR( *reinterpret_cast<VideoEncodeH265SessionParametersCreateInfoKHR const *>( &rhs ) )
{}
VideoEncodeH265SessionParametersCreateInfoKHR & operator=( VideoEncodeH265SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoEncodeH265SessionParametersCreateInfoKHR & operator=( VkVideoEncodeH265SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersCreateInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersCreateInfoKHR & setMaxStdVPSCount( uint32_t maxStdVPSCount_ ) VULKAN_HPP_NOEXCEPT
{
maxStdVPSCount = maxStdVPSCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersCreateInfoKHR & setMaxStdSPSCount( uint32_t maxStdSPSCount_ ) VULKAN_HPP_NOEXCEPT
{
maxStdSPSCount = maxStdSPSCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersCreateInfoKHR & setMaxStdPPSCount( uint32_t maxStdPPSCount_ ) VULKAN_HPP_NOEXCEPT
{
maxStdPPSCount = maxStdPPSCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersCreateInfoKHR & setPParametersAddInfo( const VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoKHR * pParametersAddInfo_ ) VULKAN_HPP_NOEXCEPT
{
pParametersAddInfo = pParametersAddInfo_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoEncodeH265SessionParametersCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeH265SessionParametersCreateInfoKHR*>( this );
}
operator VkVideoEncodeH265SessionParametersCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeH265SessionParametersCreateInfoKHR*>( this );
}
operator VkVideoEncodeH265SessionParametersCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoEncodeH265SessionParametersCreateInfoKHR*>( this );
}
operator VkVideoEncodeH265SessionParametersCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoEncodeH265SessionParametersCreateInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, uint32_t const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoKHR * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, maxStdVPSCount, maxStdSPSCount, maxStdPPSCount, pParametersAddInfo );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoEncodeH265SessionParametersCreateInfoKHR const & ) const = default;
#else
bool operator==( VideoEncodeH265SessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( maxStdVPSCount == rhs.maxStdVPSCount )
&& ( maxStdSPSCount == rhs.maxStdSPSCount )
&& ( maxStdPPSCount == rhs.maxStdPPSCount )
&& ( pParametersAddInfo == rhs.pParametersAddInfo );
#endif
}
bool operator!=( VideoEncodeH265SessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265SessionParametersCreateInfoKHR;
const void * pNext = {};
uint32_t maxStdVPSCount = {};
uint32_t maxStdSPSCount = {};
uint32_t maxStdPPSCount = {};
const VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoKHR * pParametersAddInfo = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoEncodeH265SessionParametersCreateInfoKHR>
{
using Type = VideoEncodeH265SessionParametersCreateInfoKHR;
};
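// Usage sketch (illustrative): size the parameter-set capacity and seed it with an add-info block;
// `addInfo` refers to an object like the one sketched after VideoEncodeH265SessionParametersAddInfoKHR.
//   vk::VideoEncodeH265SessionParametersCreateInfoKHR paramsCreate{};
//   paramsCreate.setMaxStdVPSCount( 1 ).setMaxStdSPSCount( 1 ).setMaxStdPPSCount( 1 )
//               .setPParametersAddInfo( &addInfo );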
// wrapper struct for struct VkVideoEncodeH265SessionParametersFeedbackInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH265SessionParametersFeedbackInfoKHR.html
struct VideoEncodeH265SessionParametersFeedbackInfoKHR
{
using NativeType = VkVideoEncodeH265SessionParametersFeedbackInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265SessionParametersFeedbackInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionParametersFeedbackInfoKHR(VULKAN_HPP_NAMESPACE::Bool32 hasStdVPSOverrides_ = {}, VULKAN_HPP_NAMESPACE::Bool32 hasStdSPSOverrides_ = {}, VULKAN_HPP_NAMESPACE::Bool32 hasStdPPSOverrides_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, hasStdVPSOverrides{ hasStdVPSOverrides_ }, hasStdSPSOverrides{ hasStdSPSOverrides_ }, hasStdPPSOverrides{ hasStdPPSOverrides_ }
{}
VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionParametersFeedbackInfoKHR( VideoEncodeH265SessionParametersFeedbackInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeH265SessionParametersFeedbackInfoKHR( VkVideoEncodeH265SessionParametersFeedbackInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeH265SessionParametersFeedbackInfoKHR( *reinterpret_cast<VideoEncodeH265SessionParametersFeedbackInfoKHR const *>( &rhs ) )
{}
VideoEncodeH265SessionParametersFeedbackInfoKHR & operator=( VideoEncodeH265SessionParametersFeedbackInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoEncodeH265SessionParametersFeedbackInfoKHR & operator=( VkVideoEncodeH265SessionParametersFeedbackInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersFeedbackInfoKHR const *>( &rhs );
return *this;
}
operator VkVideoEncodeH265SessionParametersFeedbackInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeH265SessionParametersFeedbackInfoKHR*>( this );
}
operator VkVideoEncodeH265SessionParametersFeedbackInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeH265SessionParametersFeedbackInfoKHR*>( this );
}
operator VkVideoEncodeH265SessionParametersFeedbackInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoEncodeH265SessionParametersFeedbackInfoKHR*>( this );
}
operator VkVideoEncodeH265SessionParametersFeedbackInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoEncodeH265SessionParametersFeedbackInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, hasStdVPSOverrides, hasStdSPSOverrides, hasStdPPSOverrides );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoEncodeH265SessionParametersFeedbackInfoKHR const & ) const = default;
#else
bool operator==( VideoEncodeH265SessionParametersFeedbackInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( hasStdVPSOverrides == rhs.hasStdVPSOverrides )
&& ( hasStdSPSOverrides == rhs.hasStdSPSOverrides )
&& ( hasStdPPSOverrides == rhs.hasStdPPSOverrides );
#endif
}
bool operator!=( VideoEncodeH265SessionParametersFeedbackInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265SessionParametersFeedbackInfoKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 hasStdVPSOverrides = {};
VULKAN_HPP_NAMESPACE::Bool32 hasStdSPSOverrides = {};
VULKAN_HPP_NAMESPACE::Bool32 hasStdPPSOverrides = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoEncodeH265SessionParametersFeedbackInfoKHR>
{
using Type = VideoEncodeH265SessionParametersFeedbackInfoKHR;
};
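// Usage sketch (illustrative): this structure is returned rather than provided, hence no setters; after a
// successful vkGetEncodedVideoSessionParametersKHR query (chained through the generic feedback structure)
// it reports whether the implementation overrode any requested parameter set.
//   vk::VideoEncodeH265SessionParametersFeedbackInfoKHR h265Feedback{};
//   // ... filled by the query ...
//   bool overridden = h265Feedback.hasStdVPSOverrides || h265Feedback.hasStdSPSOverrides || h265Feedback.hasStdPPSOverrides;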
// wrapper struct for struct VkVideoEncodeH265SessionParametersGetInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH265SessionParametersGetInfoKHR.html
struct VideoEncodeH265SessionParametersGetInfoKHR
{
using NativeType = VkVideoEncodeH265SessionParametersGetInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265SessionParametersGetInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionParametersGetInfoKHR(VULKAN_HPP_NAMESPACE::Bool32 writeStdVPS_ = {}, VULKAN_HPP_NAMESPACE::Bool32 writeStdSPS_ = {}, VULKAN_HPP_NAMESPACE::Bool32 writeStdPPS_ = {}, uint32_t stdVPSId_ = {}, uint32_t stdSPSId_ = {}, uint32_t stdPPSId_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, writeStdVPS{ writeStdVPS_ }, writeStdSPS{ writeStdSPS_ }, writeStdPPS{ writeStdPPS_ }, stdVPSId{ stdVPSId_ }, stdSPSId{ stdSPSId_ }, stdPPSId{ stdPPSId_ }
{}
VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionParametersGetInfoKHR( VideoEncodeH265SessionParametersGetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeH265SessionParametersGetInfoKHR( VkVideoEncodeH265SessionParametersGetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeH265SessionParametersGetInfoKHR( *reinterpret_cast<VideoEncodeH265SessionParametersGetInfoKHR const *>( &rhs ) )
{}
VideoEncodeH265SessionParametersGetInfoKHR & operator=( VideoEncodeH265SessionParametersGetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoEncodeH265SessionParametersGetInfoKHR & operator=( VkVideoEncodeH265SessionParametersGetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersGetInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersGetInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersGetInfoKHR & setWriteStdVPS( VULKAN_HPP_NAMESPACE::Bool32 writeStdVPS_ ) VULKAN_HPP_NOEXCEPT
{
writeStdVPS = writeStdVPS_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersGetInfoKHR & setWriteStdSPS( VULKAN_HPP_NAMESPACE::Bool32 writeStdSPS_ ) VULKAN_HPP_NOEXCEPT
{
writeStdSPS = writeStdSPS_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersGetInfoKHR & setWriteStdPPS( VULKAN_HPP_NAMESPACE::Bool32 writeStdPPS_ ) VULKAN_HPP_NOEXCEPT
{
writeStdPPS = writeStdPPS_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersGetInfoKHR & setStdVPSId( uint32_t stdVPSId_ ) VULKAN_HPP_NOEXCEPT
{
stdVPSId = stdVPSId_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersGetInfoKHR & setStdSPSId( uint32_t stdSPSId_ ) VULKAN_HPP_NOEXCEPT
{
stdSPSId = stdSPSId_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersGetInfoKHR & setStdPPSId( uint32_t stdPPSId_ ) VULKAN_HPP_NOEXCEPT
{
stdPPSId = stdPPSId_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoEncodeH265SessionParametersGetInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeH265SessionParametersGetInfoKHR*>( this );
}
operator VkVideoEncodeH265SessionParametersGetInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeH265SessionParametersGetInfoKHR*>( this );
}
operator VkVideoEncodeH265SessionParametersGetInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoEncodeH265SessionParametersGetInfoKHR*>( this );
}
operator VkVideoEncodeH265SessionParametersGetInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoEncodeH265SessionParametersGetInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint32_t const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, writeStdVPS, writeStdSPS, writeStdPPS, stdVPSId, stdSPSId, stdPPSId );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoEncodeH265SessionParametersGetInfoKHR const & ) const = default;
#else
bool operator==( VideoEncodeH265SessionParametersGetInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( writeStdVPS == rhs.writeStdVPS )
&& ( writeStdSPS == rhs.writeStdSPS )
&& ( writeStdPPS == rhs.writeStdPPS )
&& ( stdVPSId == rhs.stdVPSId )
&& ( stdSPSId == rhs.stdSPSId )
&& ( stdPPSId == rhs.stdPPSId );
#endif
}
bool operator!=( VideoEncodeH265SessionParametersGetInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265SessionParametersGetInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 writeStdVPS = {};
VULKAN_HPP_NAMESPACE::Bool32 writeStdSPS = {};
VULKAN_HPP_NAMESPACE::Bool32 writeStdPPS = {};
uint32_t stdVPSId = {};
uint32_t stdSPSId = {};
uint32_t stdPPSId = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoEncodeH265SessionParametersGetInfoKHR>
{
using Type = VideoEncodeH265SessionParametersGetInfoKHR;
};
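// Usage sketch (illustrative): request that the encoded VPS/SPS/PPS with the given ids be written back by
// vkGetEncodedVideoSessionParametersKHR; the ids below are example values.
//   vk::VideoEncodeH265SessionParametersGetInfoKHR getInfo{};
//   getInfo.setWriteStdVPS( VK_TRUE ).setWriteStdSPS( VK_TRUE ).setWriteStdPPS( VK_TRUE )
//          .setStdVPSId( 0 ).setStdSPSId( 0 ).setStdPPSId( 0 );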
// wrapper struct for struct VkVideoEncodeInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeInfoKHR.html
struct VideoEncodeInfoKHR
{
using NativeType = VkVideoEncodeInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoEncodeInfoKHR(VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR flags_ = {}, VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize dstBufferOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize dstBufferRange_ = {}, VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR srcPictureResource_ = {}, const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pSetupReferenceSlot_ = {}, uint32_t referenceSlotCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots_ = {}, uint32_t precedingExternallyEncodedBytes_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, dstBuffer{ dstBuffer_ }, dstBufferOffset{ dstBufferOffset_ }, dstBufferRange{ dstBufferRange_ }, srcPictureResource{ srcPictureResource_ }, pSetupReferenceSlot{ pSetupReferenceSlot_ }, referenceSlotCount{ referenceSlotCount_ }, pReferenceSlots{ pReferenceSlots_ }, precedingExternallyEncodedBytes{ precedingExternallyEncodedBytes_ }
{}
VULKAN_HPP_CONSTEXPR VideoEncodeInfoKHR( VideoEncodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeInfoKHR( VkVideoEncodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeInfoKHR( *reinterpret_cast<VideoEncodeInfoKHR const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
VideoEncodeInfoKHR( VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR flags_, VULKAN_HPP_NAMESPACE::Buffer dstBuffer_, VULKAN_HPP_NAMESPACE::DeviceSize dstBufferOffset_, VULKAN_HPP_NAMESPACE::DeviceSize dstBufferRange_, VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR srcPictureResource_, const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pSetupReferenceSlot_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR> const & referenceSlots_, uint32_t precedingExternallyEncodedBytes_ = {}, const void * pNext_ = nullptr )
: pNext( pNext_ ), flags( flags_ ), dstBuffer( dstBuffer_ ), dstBufferOffset( dstBufferOffset_ ), dstBufferRange( dstBufferRange_ ), srcPictureResource( srcPictureResource_ ), pSetupReferenceSlot( pSetupReferenceSlot_ ), referenceSlotCount( static_cast<uint32_t>( referenceSlots_.size() ) ), pReferenceSlots( referenceSlots_.data() ), precedingExternallyEncodedBytes( precedingExternallyEncodedBytes_ )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VideoEncodeInfoKHR & operator=( VideoEncodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoEncodeInfoKHR & operator=( VkVideoEncodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setDstBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ ) VULKAN_HPP_NOEXCEPT
{
dstBuffer = dstBuffer_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setDstBufferOffset( VULKAN_HPP_NAMESPACE::DeviceSize dstBufferOffset_ ) VULKAN_HPP_NOEXCEPT
{
dstBufferOffset = dstBufferOffset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setDstBufferRange( VULKAN_HPP_NAMESPACE::DeviceSize dstBufferRange_ ) VULKAN_HPP_NOEXCEPT
{
dstBufferRange = dstBufferRange_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setSrcPictureResource( VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR const & srcPictureResource_ ) VULKAN_HPP_NOEXCEPT
{
srcPictureResource = srcPictureResource_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setPSetupReferenceSlot( const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pSetupReferenceSlot_ ) VULKAN_HPP_NOEXCEPT
{
pSetupReferenceSlot = pSetupReferenceSlot_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setReferenceSlotCount( uint32_t referenceSlotCount_ ) VULKAN_HPP_NOEXCEPT
{
referenceSlotCount = referenceSlotCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setPReferenceSlots( const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots_ ) VULKAN_HPP_NOEXCEPT
{
pReferenceSlots = pReferenceSlots_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
VideoEncodeInfoKHR & setReferenceSlots( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR> const & referenceSlots_ ) VULKAN_HPP_NOEXCEPT
{
referenceSlotCount = static_cast<uint32_t>( referenceSlots_.size() );
pReferenceSlots = referenceSlots_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setPrecedingExternallyEncodedBytes( uint32_t precedingExternallyEncodedBytes_ ) VULKAN_HPP_NOEXCEPT
{
precedingExternallyEncodedBytes = precedingExternallyEncodedBytes_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoEncodeInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeInfoKHR*>( this );
}
operator VkVideoEncodeInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeInfoKHR*>( this );
}
operator VkVideoEncodeInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoEncodeInfoKHR*>( this );
}
operator VkVideoEncodeInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoEncodeInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR const &, VULKAN_HPP_NAMESPACE::Buffer const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR const &, const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, dstBuffer, dstBufferOffset, dstBufferRange, srcPictureResource, pSetupReferenceSlot, referenceSlotCount, pReferenceSlots, precedingExternallyEncodedBytes );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoEncodeInfoKHR const & ) const = default;
#else
bool operator==( VideoEncodeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( dstBuffer == rhs.dstBuffer )
&& ( dstBufferOffset == rhs.dstBufferOffset )
&& ( dstBufferRange == rhs.dstBufferRange )
&& ( srcPictureResource == rhs.srcPictureResource )
&& ( pSetupReferenceSlot == rhs.pSetupReferenceSlot )
&& ( referenceSlotCount == rhs.referenceSlotCount )
&& ( pReferenceSlots == rhs.pReferenceSlots )
&& ( precedingExternallyEncodedBytes == rhs.precedingExternallyEncodedBytes );
#endif
}
bool operator!=( VideoEncodeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR flags = {};
VULKAN_HPP_NAMESPACE::Buffer dstBuffer = {};
VULKAN_HPP_NAMESPACE::DeviceSize dstBufferOffset = {};
VULKAN_HPP_NAMESPACE::DeviceSize dstBufferRange = {};
VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR srcPictureResource = {};
const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pSetupReferenceSlot = {};
uint32_t referenceSlotCount = {};
const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots = {};
uint32_t precedingExternallyEncodedBytes = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoEncodeInfoKHR>
{
using Type = VideoEncodeInfoKHR;
};
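// Usage sketch (illustrative): describe a single encode operation; bitstreamBuffer, bitstreamCapacity,
// srcResource and refSlots are hypothetical objects the application prepared earlier (refSlots being,
// e.g., a std::vector<vk::VideoReferenceSlotInfoKHR>).
//   vk::VideoEncodeInfoKHR encodeInfo{};
//   encodeInfo.setDstBuffer( bitstreamBuffer ).setDstBufferOffset( 0 ).setDstBufferRange( bitstreamCapacity )
//             .setSrcPictureResource( srcResource ).setReferenceSlots( refSlots );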
// wrapper struct for struct VkVideoEncodeQualityLevelInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeQualityLevelInfoKHR.html
struct VideoEncodeQualityLevelInfoKHR
{
using NativeType = VkVideoEncodeQualityLevelInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeQualityLevelInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoEncodeQualityLevelInfoKHR(uint32_t qualityLevel_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, qualityLevel{ qualityLevel_ }
{}
VULKAN_HPP_CONSTEXPR VideoEncodeQualityLevelInfoKHR( VideoEncodeQualityLevelInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeQualityLevelInfoKHR( VkVideoEncodeQualityLevelInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeQualityLevelInfoKHR( *reinterpret_cast<VideoEncodeQualityLevelInfoKHR const *>( &rhs ) )
{}
VideoEncodeQualityLevelInfoKHR & operator=( VideoEncodeQualityLevelInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoEncodeQualityLevelInfoKHR & operator=( VkVideoEncodeQualityLevelInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoEncodeQualityLevelInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeQualityLevelInfoKHR & setQualityLevel( uint32_t qualityLevel_ ) VULKAN_HPP_NOEXCEPT
{
qualityLevel = qualityLevel_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoEncodeQualityLevelInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeQualityLevelInfoKHR*>( this );
}
operator VkVideoEncodeQualityLevelInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeQualityLevelInfoKHR*>( this );
}
operator VkVideoEncodeQualityLevelInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoEncodeQualityLevelInfoKHR*>( this );
}
operator VkVideoEncodeQualityLevelInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoEncodeQualityLevelInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, qualityLevel );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoEncodeQualityLevelInfoKHR const & ) const = default;
#else
bool operator==( VideoEncodeQualityLevelInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( qualityLevel == rhs.qualityLevel );
#endif
}
bool operator!=( VideoEncodeQualityLevelInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeQualityLevelInfoKHR;
const void * pNext = {};
uint32_t qualityLevel = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoEncodeQualityLevelInfoKHR>
{
using Type = VideoEncodeQualityLevelInfoKHR;
};
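// Usage sketch (illustrative): select an encoder quality level; per the linked registry page this
// structure is chained through pNext rather than passed on its own.
//   vk::VideoEncodeQualityLevelInfoKHR quality{};
//   quality.setQualityLevel( 0 );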
// wrapper struct for struct VkVideoEncodeQualityLevelPropertiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeQualityLevelPropertiesKHR.html
struct VideoEncodeQualityLevelPropertiesKHR
{
using NativeType = VkVideoEncodeQualityLevelPropertiesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeQualityLevelPropertiesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoEncodeQualityLevelPropertiesKHR(VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR preferredRateControlMode_ = VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR::eDefault, uint32_t preferredRateControlLayerCount_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, preferredRateControlMode{ preferredRateControlMode_ }, preferredRateControlLayerCount{ preferredRateControlLayerCount_ }
{}
VULKAN_HPP_CONSTEXPR VideoEncodeQualityLevelPropertiesKHR( VideoEncodeQualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeQualityLevelPropertiesKHR( VkVideoEncodeQualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeQualityLevelPropertiesKHR( *reinterpret_cast<VideoEncodeQualityLevelPropertiesKHR const *>( &rhs ) )
{}
VideoEncodeQualityLevelPropertiesKHR & operator=( VideoEncodeQualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoEncodeQualityLevelPropertiesKHR & operator=( VkVideoEncodeQualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR const *>( &rhs );
return *this;
}
operator VkVideoEncodeQualityLevelPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeQualityLevelPropertiesKHR*>( this );
}
operator VkVideoEncodeQualityLevelPropertiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeQualityLevelPropertiesKHR*>( this );
}
operator VkVideoEncodeQualityLevelPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoEncodeQualityLevelPropertiesKHR*>( this );
}
operator VkVideoEncodeQualityLevelPropertiesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoEncodeQualityLevelPropertiesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, preferredRateControlMode, preferredRateControlLayerCount );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoEncodeQualityLevelPropertiesKHR const & ) const = default;
#else
bool operator==( VideoEncodeQualityLevelPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( preferredRateControlMode == rhs.preferredRateControlMode )
&& ( preferredRateControlLayerCount == rhs.preferredRateControlLayerCount );
#endif
}
bool operator!=( VideoEncodeQualityLevelPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeQualityLevelPropertiesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR preferredRateControlMode = VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR::eDefault;
uint32_t preferredRateControlLayerCount = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoEncodeQualityLevelPropertiesKHR>
{
using Type = VideoEncodeQualityLevelPropertiesKHR;
};
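// Usage sketch (illustrative): read-only result of the per-quality-level query
// (vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR); only the outputs are read here.
//   vk::VideoEncodeQualityLevelPropertiesKHR props{};
//   // ... filled by the query ...
//   uint32_t preferredLayers = props.preferredRateControlLayerCount;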
// wrapper struct for struct VkVideoEncodeQuantizationMapCapabilitiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeQuantizationMapCapabilitiesKHR.html
struct VideoEncodeQuantizationMapCapabilitiesKHR
{
using NativeType = VkVideoEncodeQuantizationMapCapabilitiesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeQuantizationMapCapabilitiesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoEncodeQuantizationMapCapabilitiesKHR(VULKAN_HPP_NAMESPACE::Extent2D maxQuantizationMapExtent_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, maxQuantizationMapExtent{ maxQuantizationMapExtent_ }
{}
VULKAN_HPP_CONSTEXPR VideoEncodeQuantizationMapCapabilitiesKHR( VideoEncodeQuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeQuantizationMapCapabilitiesKHR( VkVideoEncodeQuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeQuantizationMapCapabilitiesKHR( *reinterpret_cast<VideoEncodeQuantizationMapCapabilitiesKHR const *>( &rhs ) )
{}
VideoEncodeQuantizationMapCapabilitiesKHR & operator=( VideoEncodeQuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoEncodeQuantizationMapCapabilitiesKHR & operator=( VkVideoEncodeQuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeQuantizationMapCapabilitiesKHR const *>( &rhs );
return *this;
}
operator VkVideoEncodeQuantizationMapCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeQuantizationMapCapabilitiesKHR*>( this );
}
operator VkVideoEncodeQuantizationMapCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeQuantizationMapCapabilitiesKHR*>( this );
}
operator VkVideoEncodeQuantizationMapCapabilitiesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoEncodeQuantizationMapCapabilitiesKHR*>( this );
}
operator VkVideoEncodeQuantizationMapCapabilitiesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoEncodeQuantizationMapCapabilitiesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Extent2D const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, maxQuantizationMapExtent );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoEncodeQuantizationMapCapabilitiesKHR const & ) const = default;
#else
bool operator==( VideoEncodeQuantizationMapCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( maxQuantizationMapExtent == rhs.maxQuantizationMapExtent );
#endif
}
bool operator!=( VideoEncodeQuantizationMapCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeQuantizationMapCapabilitiesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Extent2D maxQuantizationMapExtent = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoEncodeQuantizationMapCapabilitiesKHR>
{
using Type = VideoEncodeQuantizationMapCapabilitiesKHR;
};
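// Usage sketch (illustrative): returned through a capabilities query's pNext chain; the only payload is
// the maximum quantization-map extent.
//   vk::VideoEncodeQuantizationMapCapabilitiesKHR qmCaps{};
//   // ... filled by the capabilities query ...
//   vk::Extent2D maxExtent = qmCaps.maxQuantizationMapExtent;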
// wrapper struct for struct VkVideoEncodeQuantizationMapInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeQuantizationMapInfoKHR.html
struct VideoEncodeQuantizationMapInfoKHR
{
using NativeType = VkVideoEncodeQuantizationMapInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeQuantizationMapInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoEncodeQuantizationMapInfoKHR(VULKAN_HPP_NAMESPACE::ImageView quantizationMap_ = {}, VULKAN_HPP_NAMESPACE::Extent2D quantizationMapExtent_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, quantizationMap{ quantizationMap_ }, quantizationMapExtent{ quantizationMapExtent_ }
{}
VULKAN_HPP_CONSTEXPR VideoEncodeQuantizationMapInfoKHR( VideoEncodeQuantizationMapInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeQuantizationMapInfoKHR( VkVideoEncodeQuantizationMapInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeQuantizationMapInfoKHR( *reinterpret_cast<VideoEncodeQuantizationMapInfoKHR const *>( &rhs ) )
{}
VideoEncodeQuantizationMapInfoKHR & operator=( VideoEncodeQuantizationMapInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoEncodeQuantizationMapInfoKHR & operator=( VkVideoEncodeQuantizationMapInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeQuantizationMapInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoEncodeQuantizationMapInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeQuantizationMapInfoKHR & setQuantizationMap( VULKAN_HPP_NAMESPACE::ImageView quantizationMap_ ) VULKAN_HPP_NOEXCEPT
{
quantizationMap = quantizationMap_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeQuantizationMapInfoKHR & setQuantizationMapExtent( VULKAN_HPP_NAMESPACE::Extent2D const & quantizationMapExtent_ ) VULKAN_HPP_NOEXCEPT
{
quantizationMapExtent = quantizationMapExtent_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoEncodeQuantizationMapInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeQuantizationMapInfoKHR*>( this );
}
operator VkVideoEncodeQuantizationMapInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeQuantizationMapInfoKHR*>( this );
}
operator VkVideoEncodeQuantizationMapInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoEncodeQuantizationMapInfoKHR*>( this );
}
operator VkVideoEncodeQuantizationMapInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoEncodeQuantizationMapInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageView const &, VULKAN_HPP_NAMESPACE::Extent2D const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, quantizationMap, quantizationMapExtent );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoEncodeQuantizationMapInfoKHR const & ) const = default;
#else
bool operator==( VideoEncodeQuantizationMapInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( quantizationMap == rhs.quantizationMap )
&& ( quantizationMapExtent == rhs.quantizationMapExtent );
#endif
}
bool operator!=( VideoEncodeQuantizationMapInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeQuantizationMapInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ImageView quantizationMap = {};
VULKAN_HPP_NAMESPACE::Extent2D quantizationMapExtent = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoEncodeQuantizationMapInfoKHR>
{
using Type = VideoEncodeQuantizationMapInfoKHR;
};
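// Usage sketch (illustrative): attach a per-block quantization map to an encode operation; qpMapView is a
// hypothetical image view created with quantization-map usage, and the extent is an arbitrary example.
//   vk::VideoEncodeQuantizationMapInfoKHR qmInfo{};
//   qmInfo.setQuantizationMap( qpMapView ).setQuantizationMapExtent( { 120, 68 } );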
// wrapper struct for struct VkVideoEncodeQuantizationMapSessionParametersCreateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeQuantizationMapSessionParametersCreateInfoKHR.html
struct VideoEncodeQuantizationMapSessionParametersCreateInfoKHR
{
using NativeType = VkVideoEncodeQuantizationMapSessionParametersCreateInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeQuantizationMapSessionParametersCreateInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoEncodeQuantizationMapSessionParametersCreateInfoKHR(VULKAN_HPP_NAMESPACE::Extent2D quantizationMapTexelSize_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, quantizationMapTexelSize{ quantizationMapTexelSize_ }
{}
VULKAN_HPP_CONSTEXPR VideoEncodeQuantizationMapSessionParametersCreateInfoKHR( VideoEncodeQuantizationMapSessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeQuantizationMapSessionParametersCreateInfoKHR( VkVideoEncodeQuantizationMapSessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeQuantizationMapSessionParametersCreateInfoKHR( *reinterpret_cast<VideoEncodeQuantizationMapSessionParametersCreateInfoKHR const *>( &rhs ) )
{}
VideoEncodeQuantizationMapSessionParametersCreateInfoKHR & operator=( VideoEncodeQuantizationMapSessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoEncodeQuantizationMapSessionParametersCreateInfoKHR & operator=( VkVideoEncodeQuantizationMapSessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeQuantizationMapSessionParametersCreateInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoEncodeQuantizationMapSessionParametersCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeQuantizationMapSessionParametersCreateInfoKHR & setQuantizationMapTexelSize( VULKAN_HPP_NAMESPACE::Extent2D const & quantizationMapTexelSize_ ) VULKAN_HPP_NOEXCEPT
{
quantizationMapTexelSize = quantizationMapTexelSize_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoEncodeQuantizationMapSessionParametersCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeQuantizationMapSessionParametersCreateInfoKHR*>( this );
}
operator VkVideoEncodeQuantizationMapSessionParametersCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeQuantizationMapSessionParametersCreateInfoKHR*>( this );
}
operator VkVideoEncodeQuantizationMapSessionParametersCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoEncodeQuantizationMapSessionParametersCreateInfoKHR*>( this );
}
operator VkVideoEncodeQuantizationMapSessionParametersCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoEncodeQuantizationMapSessionParametersCreateInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Extent2D const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, quantizationMapTexelSize );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoEncodeQuantizationMapSessionParametersCreateInfoKHR const & ) const = default;
#else
bool operator==( VideoEncodeQuantizationMapSessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( quantizationMapTexelSize == rhs.quantizationMapTexelSize );
#endif
}
bool operator!=( VideoEncodeQuantizationMapSessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeQuantizationMapSessionParametersCreateInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Extent2D quantizationMapTexelSize = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoEncodeQuantizationMapSessionParametersCreateInfoKHR>
{
using Type = VideoEncodeQuantizationMapSessionParametersCreateInfoKHR;
};
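  // Usage sketch (illustrative): the texel size must match one reported via the
  // quantization map format properties; a 16x16 value is assumed here.
  //
  //   VULKAN_HPP_NAMESPACE::VideoEncodeQuantizationMapSessionParametersCreateInfoKHR quantizationMapParamsInfo =
  //     VULKAN_HPP_NAMESPACE::VideoEncodeQuantizationMapSessionParametersCreateInfoKHR{}
  //       .setQuantizationMapTexelSize( VULKAN_HPP_NAMESPACE::Extent2D{ 16, 16 } );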
// wrapper struct for struct VkVideoEncodeRateControlLayerInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeRateControlLayerInfoKHR.html
struct VideoEncodeRateControlLayerInfoKHR
{
using NativeType = VkVideoEncodeRateControlLayerInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeRateControlLayerInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoEncodeRateControlLayerInfoKHR(uint64_t averageBitrate_ = {}, uint64_t maxBitrate_ = {}, uint32_t frameRateNumerator_ = {}, uint32_t frameRateDenominator_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, averageBitrate{ averageBitrate_ }, maxBitrate{ maxBitrate_ }, frameRateNumerator{ frameRateNumerator_ }, frameRateDenominator{ frameRateDenominator_ }
{}
VULKAN_HPP_CONSTEXPR VideoEncodeRateControlLayerInfoKHR( VideoEncodeRateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeRateControlLayerInfoKHR( VkVideoEncodeRateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeRateControlLayerInfoKHR( *reinterpret_cast<VideoEncodeRateControlLayerInfoKHR const *>( &rhs ) )
{}
VideoEncodeRateControlLayerInfoKHR & operator=( VideoEncodeRateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoEncodeRateControlLayerInfoKHR & operator=( VkVideoEncodeRateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR & setAverageBitrate( uint64_t averageBitrate_ ) VULKAN_HPP_NOEXCEPT
{
averageBitrate = averageBitrate_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR & setMaxBitrate( uint64_t maxBitrate_ ) VULKAN_HPP_NOEXCEPT
{
maxBitrate = maxBitrate_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR & setFrameRateNumerator( uint32_t frameRateNumerator_ ) VULKAN_HPP_NOEXCEPT
{
frameRateNumerator = frameRateNumerator_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR & setFrameRateDenominator( uint32_t frameRateDenominator_ ) VULKAN_HPP_NOEXCEPT
{
frameRateDenominator = frameRateDenominator_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoEncodeRateControlLayerInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeRateControlLayerInfoKHR*>( this );
}
operator VkVideoEncodeRateControlLayerInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeRateControlLayerInfoKHR*>( this );
}
operator VkVideoEncodeRateControlLayerInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoEncodeRateControlLayerInfoKHR*>( this );
}
operator VkVideoEncodeRateControlLayerInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoEncodeRateControlLayerInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint64_t const &, uint64_t const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, averageBitrate, maxBitrate, frameRateNumerator, frameRateDenominator );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoEncodeRateControlLayerInfoKHR const & ) const = default;
#else
bool operator==( VideoEncodeRateControlLayerInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( averageBitrate == rhs.averageBitrate )
&& ( maxBitrate == rhs.maxBitrate )
&& ( frameRateNumerator == rhs.frameRateNumerator )
&& ( frameRateDenominator == rhs.frameRateDenominator );
#endif
}
bool operator!=( VideoEncodeRateControlLayerInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeRateControlLayerInfoKHR;
const void * pNext = {};
uint64_t averageBitrate = {};
uint64_t maxBitrate = {};
uint32_t frameRateNumerator = {};
uint32_t frameRateDenominator = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoEncodeRateControlLayerInfoKHR>
{
using Type = VideoEncodeRateControlLayerInfoKHR;
};
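  // Usage sketch (illustrative): one 30 fps rate-control layer targeting 5 Mbps
  // on average with an 8 Mbps ceiling; bitrates are given in bits per second.
  //
  //   VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR layerInfo =
  //     VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR{}
  //       .setAverageBitrate( 5000000 )
  //       .setMaxBitrate( 8000000 )
  //       .setFrameRateNumerator( 30 )
  //       .setFrameRateDenominator( 1 );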
// wrapper struct for struct VkVideoEncodeRateControlInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeRateControlInfoKHR.html
struct VideoEncodeRateControlInfoKHR
{
using NativeType = VkVideoEncodeRateControlInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeRateControlInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoEncodeRateControlInfoKHR(VULKAN_HPP_NAMESPACE::VideoEncodeRateControlFlagsKHR flags_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR rateControlMode_ = VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR::eDefault, uint32_t layerCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR * pLayers_ = {}, uint32_t virtualBufferSizeInMs_ = {}, uint32_t initialVirtualBufferSizeInMs_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, rateControlMode{ rateControlMode_ }, layerCount{ layerCount_ }, pLayers{ pLayers_ }, virtualBufferSizeInMs{ virtualBufferSizeInMs_ }, initialVirtualBufferSizeInMs{ initialVirtualBufferSizeInMs_ }
{}
VULKAN_HPP_CONSTEXPR VideoEncodeRateControlInfoKHR( VideoEncodeRateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeRateControlInfoKHR( VkVideoEncodeRateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeRateControlInfoKHR( *reinterpret_cast<VideoEncodeRateControlInfoKHR const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
VideoEncodeRateControlInfoKHR( VULKAN_HPP_NAMESPACE::VideoEncodeRateControlFlagsKHR flags_, VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR rateControlMode_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR> const & layers_, uint32_t virtualBufferSizeInMs_ = {}, uint32_t initialVirtualBufferSizeInMs_ = {}, const void * pNext_ = nullptr )
: pNext( pNext_ ), flags( flags_ ), rateControlMode( rateControlMode_ ), layerCount( static_cast<uint32_t>( layers_.size() ) ), pLayers( layers_.data() ), virtualBufferSizeInMs( virtualBufferSizeInMs_ ), initialVirtualBufferSizeInMs( initialVirtualBufferSizeInMs_ )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VideoEncodeRateControlInfoKHR & operator=( VideoEncodeRateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoEncodeRateControlInfoKHR & operator=( VkVideoEncodeRateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeRateControlInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoEncodeRateControlFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR & setRateControlMode( VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR rateControlMode_ ) VULKAN_HPP_NOEXCEPT
{
rateControlMode = rateControlMode_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
{
layerCount = layerCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR & setPLayers( const VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR * pLayers_ ) VULKAN_HPP_NOEXCEPT
{
pLayers = pLayers_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
VideoEncodeRateControlInfoKHR & setLayers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR> const & layers_ ) VULKAN_HPP_NOEXCEPT
{
layerCount = static_cast<uint32_t>( layers_.size() );
pLayers = layers_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR & setVirtualBufferSizeInMs( uint32_t virtualBufferSizeInMs_ ) VULKAN_HPP_NOEXCEPT
{
virtualBufferSizeInMs = virtualBufferSizeInMs_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR & setInitialVirtualBufferSizeInMs( uint32_t initialVirtualBufferSizeInMs_ ) VULKAN_HPP_NOEXCEPT
{
initialVirtualBufferSizeInMs = initialVirtualBufferSizeInMs_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoEncodeRateControlInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeRateControlInfoKHR*>( this );
}
operator VkVideoEncodeRateControlInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeRateControlInfoKHR*>( this );
}
operator VkVideoEncodeRateControlInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoEncodeRateControlInfoKHR*>( this );
}
operator VkVideoEncodeRateControlInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoEncodeRateControlInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::VideoEncodeRateControlFlagsKHR const &, VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR * const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, rateControlMode, layerCount, pLayers, virtualBufferSizeInMs, initialVirtualBufferSizeInMs );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoEncodeRateControlInfoKHR const & ) const = default;
#else
bool operator==( VideoEncodeRateControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( rateControlMode == rhs.rateControlMode )
&& ( layerCount == rhs.layerCount )
&& ( pLayers == rhs.pLayers )
&& ( virtualBufferSizeInMs == rhs.virtualBufferSizeInMs )
&& ( initialVirtualBufferSizeInMs == rhs.initialVirtualBufferSizeInMs );
#endif
}
bool operator!=( VideoEncodeRateControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeRateControlInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::VideoEncodeRateControlFlagsKHR flags = {};
VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR rateControlMode = VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR::eDefault;
uint32_t layerCount = {};
const VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR * pLayers = {};
uint32_t virtualBufferSizeInMs = {};
uint32_t initialVirtualBufferSizeInMs = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoEncodeRateControlInfoKHR>
{
using Type = VideoEncodeRateControlInfoKHR;
};
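  // Usage sketch (illustrative, enhanced mode assumed for setLayers): VBR rate
  // control driven by the layer info sketched above, with a 1000 ms virtual
  // buffer that starts empty.
  //
  //   VULKAN_HPP_NAMESPACE::VideoEncodeRateControlInfoKHR rateControlInfo =
  //     VULKAN_HPP_NAMESPACE::VideoEncodeRateControlInfoKHR{}
  //       .setRateControlMode( VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR::eVbr )
  //       .setLayers( layerInfo )
  //       .setVirtualBufferSizeInMs( 1000 )
  //       .setInitialVirtualBufferSizeInMs( 0 );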
// wrapper struct for struct VkVideoEncodeSessionParametersFeedbackInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeSessionParametersFeedbackInfoKHR.html
struct VideoEncodeSessionParametersFeedbackInfoKHR
{
using NativeType = VkVideoEncodeSessionParametersFeedbackInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeSessionParametersFeedbackInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoEncodeSessionParametersFeedbackInfoKHR(VULKAN_HPP_NAMESPACE::Bool32 hasOverrides_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, hasOverrides{ hasOverrides_ }
{}
VULKAN_HPP_CONSTEXPR VideoEncodeSessionParametersFeedbackInfoKHR( VideoEncodeSessionParametersFeedbackInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeSessionParametersFeedbackInfoKHR( VkVideoEncodeSessionParametersFeedbackInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeSessionParametersFeedbackInfoKHR( *reinterpret_cast<VideoEncodeSessionParametersFeedbackInfoKHR const *>( &rhs ) )
{}
VideoEncodeSessionParametersFeedbackInfoKHR & operator=( VideoEncodeSessionParametersFeedbackInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoEncodeSessionParametersFeedbackInfoKHR & operator=( VkVideoEncodeSessionParametersFeedbackInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR const *>( &rhs );
return *this;
}
operator VkVideoEncodeSessionParametersFeedbackInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeSessionParametersFeedbackInfoKHR*>( this );
}
operator VkVideoEncodeSessionParametersFeedbackInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR*>( this );
}
operator VkVideoEncodeSessionParametersFeedbackInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoEncodeSessionParametersFeedbackInfoKHR*>( this );
}
operator VkVideoEncodeSessionParametersFeedbackInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, hasOverrides );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoEncodeSessionParametersFeedbackInfoKHR const & ) const = default;
#else
bool operator==( VideoEncodeSessionParametersFeedbackInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( hasOverrides == rhs.hasOverrides );
#endif
}
bool operator!=( VideoEncodeSessionParametersFeedbackInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeSessionParametersFeedbackInfoKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 hasOverrides = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoEncodeSessionParametersFeedbackInfoKHR>
{
using Type = VideoEncodeSessionParametersFeedbackInfoKHR;
};
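  // Usage sketch (illustrative): an output structure; the implementation fills
  // it when encoded session parameters are retrieved (see the linked registry
  // page), after which hasOverrides tells whether any parameters were overridden.
  //
  //   VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR feedbackInfo{};
  //   // ... pass the structure to the session-parameters retrieval call ...
  //   bool overridden = ( feedbackInfo.hasOverrides == VK_TRUE );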
// wrapper struct for struct VkVideoEncodeSessionParametersGetInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeSessionParametersGetInfoKHR.html
struct VideoEncodeSessionParametersGetInfoKHR
{
using NativeType = VkVideoEncodeSessionParametersGetInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeSessionParametersGetInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoEncodeSessionParametersGetInfoKHR(VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, videoSessionParameters{ videoSessionParameters_ }
{}
VULKAN_HPP_CONSTEXPR VideoEncodeSessionParametersGetInfoKHR( VideoEncodeSessionParametersGetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeSessionParametersGetInfoKHR( VkVideoEncodeSessionParametersGetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeSessionParametersGetInfoKHR( *reinterpret_cast<VideoEncodeSessionParametersGetInfoKHR const *>( &rhs ) )
{}
VideoEncodeSessionParametersGetInfoKHR & operator=( VideoEncodeSessionParametersGetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoEncodeSessionParametersGetInfoKHR & operator=( VkVideoEncodeSessionParametersGetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoEncodeSessionParametersGetInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeSessionParametersGetInfoKHR & setVideoSessionParameters( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters_ ) VULKAN_HPP_NOEXCEPT
{
videoSessionParameters = videoSessionParameters_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoEncodeSessionParametersGetInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR*>( this );
}
operator VkVideoEncodeSessionParametersGetInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeSessionParametersGetInfoKHR*>( this );
}
operator VkVideoEncodeSessionParametersGetInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR*>( this );
}
operator VkVideoEncodeSessionParametersGetInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoEncodeSessionParametersGetInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, videoSessionParameters );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoEncodeSessionParametersGetInfoKHR const & ) const = default;
#else
bool operator==( VideoEncodeSessionParametersGetInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( videoSessionParameters == rhs.videoSessionParameters );
#endif
}
bool operator!=( VideoEncodeSessionParametersGetInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeSessionParametersGetInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoEncodeSessionParametersGetInfoKHR>
{
using Type = VideoEncodeSessionParametersGetInfoKHR;
};
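  // Usage sketch (illustrative, assuming `sessionParameters` is a valid
  // VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR handle): selects the
  // parameters object whose encoded data is to be retrieved.
  //
  //   VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR getInfo =
  //     VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR{}
  //       .setVideoSessionParameters( sessionParameters );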
// wrapper struct for struct VkVideoEncodeUsageInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeUsageInfoKHR.html
struct VideoEncodeUsageInfoKHR
{
using NativeType = VkVideoEncodeUsageInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeUsageInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoEncodeUsageInfoKHR(VULKAN_HPP_NAMESPACE::VideoEncodeUsageFlagsKHR videoUsageHints_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeContentFlagsKHR videoContentHints_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeTuningModeKHR tuningMode_ = VULKAN_HPP_NAMESPACE::VideoEncodeTuningModeKHR::eDefault, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, videoUsageHints{ videoUsageHints_ }, videoContentHints{ videoContentHints_ }, tuningMode{ tuningMode_ }
{}
VULKAN_HPP_CONSTEXPR VideoEncodeUsageInfoKHR( VideoEncodeUsageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeUsageInfoKHR( VkVideoEncodeUsageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeUsageInfoKHR( *reinterpret_cast<VideoEncodeUsageInfoKHR const *>( &rhs ) )
{}
VideoEncodeUsageInfoKHR & operator=( VideoEncodeUsageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoEncodeUsageInfoKHR & operator=( VkVideoEncodeUsageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeUsageInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoEncodeUsageInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeUsageInfoKHR & setVideoUsageHints( VULKAN_HPP_NAMESPACE::VideoEncodeUsageFlagsKHR videoUsageHints_ ) VULKAN_HPP_NOEXCEPT
{
videoUsageHints = videoUsageHints_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeUsageInfoKHR & setVideoContentHints( VULKAN_HPP_NAMESPACE::VideoEncodeContentFlagsKHR videoContentHints_ ) VULKAN_HPP_NOEXCEPT
{
videoContentHints = videoContentHints_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeUsageInfoKHR & setTuningMode( VULKAN_HPP_NAMESPACE::VideoEncodeTuningModeKHR tuningMode_ ) VULKAN_HPP_NOEXCEPT
{
tuningMode = tuningMode_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoEncodeUsageInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeUsageInfoKHR*>( this );
}
operator VkVideoEncodeUsageInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeUsageInfoKHR*>( this );
}
operator VkVideoEncodeUsageInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoEncodeUsageInfoKHR*>( this );
}
operator VkVideoEncodeUsageInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoEncodeUsageInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::VideoEncodeUsageFlagsKHR const &, VULKAN_HPP_NAMESPACE::VideoEncodeContentFlagsKHR const &, VULKAN_HPP_NAMESPACE::VideoEncodeTuningModeKHR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, videoUsageHints, videoContentHints, tuningMode );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoEncodeUsageInfoKHR const & ) const = default;
#else
bool operator==( VideoEncodeUsageInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( videoUsageHints == rhs.videoUsageHints )
&& ( videoContentHints == rhs.videoContentHints )
&& ( tuningMode == rhs.tuningMode );
#endif
}
bool operator!=( VideoEncodeUsageInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeUsageInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::VideoEncodeUsageFlagsKHR videoUsageHints = {};
VULKAN_HPP_NAMESPACE::VideoEncodeContentFlagsKHR videoContentHints = {};
VULKAN_HPP_NAMESPACE::VideoEncodeTuningModeKHR tuningMode = VULKAN_HPP_NAMESPACE::VideoEncodeTuningModeKHR::eDefault;
};
template <>
struct CppType<StructureType, StructureType::eVideoEncodeUsageInfoKHR>
{
using Type = VideoEncodeUsageInfoKHR;
};
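  // Usage sketch (illustrative; the flag bits named here are assumed from the
  // extension's enums): hint a low-latency streaming scenario with camera
  // content when building a video profile.
  //
  //   VULKAN_HPP_NAMESPACE::VideoEncodeUsageInfoKHR usageInfo =
  //     VULKAN_HPP_NAMESPACE::VideoEncodeUsageInfoKHR{}
  //       .setVideoUsageHints( VULKAN_HPP_NAMESPACE::VideoEncodeUsageFlagBitsKHR::eStreaming )
  //       .setVideoContentHints( VULKAN_HPP_NAMESPACE::VideoEncodeContentFlagBitsKHR::eCamera )
  //       .setTuningMode( VULKAN_HPP_NAMESPACE::VideoEncodeTuningModeKHR::eLowLatency );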
// wrapper struct for struct VkVideoEndCodingInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEndCodingInfoKHR.html
struct VideoEndCodingInfoKHR
{
using NativeType = VkVideoEndCodingInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEndCodingInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoEndCodingInfoKHR(VULKAN_HPP_NAMESPACE::VideoEndCodingFlagsKHR flags_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }
{}
VULKAN_HPP_CONSTEXPR VideoEndCodingInfoKHR( VideoEndCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEndCodingInfoKHR( VkVideoEndCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEndCodingInfoKHR( *reinterpret_cast<VideoEndCodingInfoKHR const *>( &rhs ) )
{}
VideoEndCodingInfoKHR & operator=( VideoEndCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoEndCodingInfoKHR & operator=( VkVideoEndCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoEndCodingInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEndCodingInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoEndCodingFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoEndCodingInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEndCodingInfoKHR*>( this );
}
operator VkVideoEndCodingInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEndCodingInfoKHR*>( this );
}
operator VkVideoEndCodingInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoEndCodingInfoKHR*>( this );
}
operator VkVideoEndCodingInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoEndCodingInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::VideoEndCodingFlagsKHR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoEndCodingInfoKHR const & ) const = default;
#else
bool operator==( VideoEndCodingInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags );
#endif
}
bool operator!=( VideoEndCodingInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEndCodingInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::VideoEndCodingFlagsKHR flags = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoEndCodingInfoKHR>
{
using Type = VideoEndCodingInfoKHR;
};
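  // Usage sketch (illustrative, assuming `commandBuffer` is recording inside a
  // video coding scope): flags are currently reserved, so a default-constructed
  // structure is typically all that is needed to close the scope.
  //
  //   VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR endCodingInfo{};
  //   commandBuffer.endVideoCodingKHR( endCodingInfo );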
// wrapper struct for struct VkVideoFormatAV1QuantizationMapPropertiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoFormatAV1QuantizationMapPropertiesKHR.html
struct VideoFormatAV1QuantizationMapPropertiesKHR
{
using NativeType = VkVideoFormatAV1QuantizationMapPropertiesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoFormatAv1QuantizationMapPropertiesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoFormatAV1QuantizationMapPropertiesKHR(VULKAN_HPP_NAMESPACE::VideoEncodeAV1SuperblockSizeFlagsKHR compatibleSuperblockSizes_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, compatibleSuperblockSizes{ compatibleSuperblockSizes_ }
{}
VULKAN_HPP_CONSTEXPR VideoFormatAV1QuantizationMapPropertiesKHR( VideoFormatAV1QuantizationMapPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoFormatAV1QuantizationMapPropertiesKHR( VkVideoFormatAV1QuantizationMapPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoFormatAV1QuantizationMapPropertiesKHR( *reinterpret_cast<VideoFormatAV1QuantizationMapPropertiesKHR const *>( &rhs ) )
{}
VideoFormatAV1QuantizationMapPropertiesKHR & operator=( VideoFormatAV1QuantizationMapPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoFormatAV1QuantizationMapPropertiesKHR & operator=( VkVideoFormatAV1QuantizationMapPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoFormatAV1QuantizationMapPropertiesKHR const *>( &rhs );
return *this;
}
operator VkVideoFormatAV1QuantizationMapPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoFormatAV1QuantizationMapPropertiesKHR*>( this );
}
operator VkVideoFormatAV1QuantizationMapPropertiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoFormatAV1QuantizationMapPropertiesKHR*>( this );
}
operator VkVideoFormatAV1QuantizationMapPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoFormatAV1QuantizationMapPropertiesKHR*>( this );
}
operator VkVideoFormatAV1QuantizationMapPropertiesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoFormatAV1QuantizationMapPropertiesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::VideoEncodeAV1SuperblockSizeFlagsKHR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, compatibleSuperblockSizes );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoFormatAV1QuantizationMapPropertiesKHR const & ) const = default;
#else
bool operator==( VideoFormatAV1QuantizationMapPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( compatibleSuperblockSizes == rhs.compatibleSuperblockSizes );
#endif
}
bool operator!=( VideoFormatAV1QuantizationMapPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoFormatAv1QuantizationMapPropertiesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::VideoEncodeAV1SuperblockSizeFlagsKHR compatibleSuperblockSizes = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoFormatAv1QuantizationMapPropertiesKHR>
{
using Type = VideoFormatAV1QuantizationMapPropertiesKHR;
};
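  // Usage sketch (illustrative): an output structure; chain it behind a
  // VideoFormatPropertiesKHR when querying quantization map formats for AV1
  // encode, then read compatibleSuperblockSizes.
  //
  //   VULKAN_HPP_NAMESPACE::VideoFormatAV1QuantizationMapPropertiesKHR av1MapProperties{};
  //   VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR formatProperties{};
  //   formatProperties.pNext = &av1MapProperties;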
// wrapper struct for struct VkVideoFormatH265QuantizationMapPropertiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoFormatH265QuantizationMapPropertiesKHR.html
struct VideoFormatH265QuantizationMapPropertiesKHR
{
using NativeType = VkVideoFormatH265QuantizationMapPropertiesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoFormatH265QuantizationMapPropertiesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoFormatH265QuantizationMapPropertiesKHR(VULKAN_HPP_NAMESPACE::VideoEncodeH265CtbSizeFlagsKHR compatibleCtbSizes_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, compatibleCtbSizes{ compatibleCtbSizes_ }
{}
VULKAN_HPP_CONSTEXPR VideoFormatH265QuantizationMapPropertiesKHR( VideoFormatH265QuantizationMapPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoFormatH265QuantizationMapPropertiesKHR( VkVideoFormatH265QuantizationMapPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoFormatH265QuantizationMapPropertiesKHR( *reinterpret_cast<VideoFormatH265QuantizationMapPropertiesKHR const *>( &rhs ) )
{}
VideoFormatH265QuantizationMapPropertiesKHR & operator=( VideoFormatH265QuantizationMapPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoFormatH265QuantizationMapPropertiesKHR & operator=( VkVideoFormatH265QuantizationMapPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoFormatH265QuantizationMapPropertiesKHR const *>( &rhs );
return *this;
}
operator VkVideoFormatH265QuantizationMapPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoFormatH265QuantizationMapPropertiesKHR*>( this );
}
operator VkVideoFormatH265QuantizationMapPropertiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoFormatH265QuantizationMapPropertiesKHR*>( this );
}
operator VkVideoFormatH265QuantizationMapPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoFormatH265QuantizationMapPropertiesKHR*>( this );
}
operator VkVideoFormatH265QuantizationMapPropertiesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoFormatH265QuantizationMapPropertiesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::VideoEncodeH265CtbSizeFlagsKHR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, compatibleCtbSizes );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoFormatH265QuantizationMapPropertiesKHR const & ) const = default;
#else
bool operator==( VideoFormatH265QuantizationMapPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( compatibleCtbSizes == rhs.compatibleCtbSizes );
#endif
}
bool operator!=( VideoFormatH265QuantizationMapPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoFormatH265QuantizationMapPropertiesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::VideoEncodeH265CtbSizeFlagsKHR compatibleCtbSizes = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoFormatH265QuantizationMapPropertiesKHR>
{
using Type = VideoFormatH265QuantizationMapPropertiesKHR;
};
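  // Usage sketch (illustrative): the H.265 counterpart of the AV1 structure
  // above; chained the same way, with compatibleCtbSizes reporting which CTB
  // sizes the quantization map format can serve.
  //
  //   VULKAN_HPP_NAMESPACE::VideoFormatH265QuantizationMapPropertiesKHR h265MapProperties{};
  //   VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR formatProperties{};
  //   formatProperties.pNext = &h265MapProperties;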
// wrapper struct for struct VkVideoFormatPropertiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoFormatPropertiesKHR.html
struct VideoFormatPropertiesKHR
{
using NativeType = VkVideoFormatPropertiesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoFormatPropertiesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoFormatPropertiesKHR(VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::ComponentMapping componentMapping_ = {}, VULKAN_HPP_NAMESPACE::ImageCreateFlags imageCreateFlags_ = {}, VULKAN_HPP_NAMESPACE::ImageType imageType_ = VULKAN_HPP_NAMESPACE::ImageType::e1D, VULKAN_HPP_NAMESPACE::ImageTiling imageTiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal, VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsageFlags_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, format{ format_ }, componentMapping{ componentMapping_ }, imageCreateFlags{ imageCreateFlags_ }, imageType{ imageType_ }, imageTiling{ imageTiling_ }, imageUsageFlags{ imageUsageFlags_ }
{}
VULKAN_HPP_CONSTEXPR VideoFormatPropertiesKHR( VideoFormatPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoFormatPropertiesKHR( VkVideoFormatPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoFormatPropertiesKHR( *reinterpret_cast<VideoFormatPropertiesKHR const *>( &rhs ) )
{}
VideoFormatPropertiesKHR & operator=( VideoFormatPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoFormatPropertiesKHR & operator=( VkVideoFormatPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR const *>( &rhs );
return *this;
}
operator VkVideoFormatPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoFormatPropertiesKHR*>( this );
}
operator VkVideoFormatPropertiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoFormatPropertiesKHR*>( this );
}
operator VkVideoFormatPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoFormatPropertiesKHR*>( this );
}
operator VkVideoFormatPropertiesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoFormatPropertiesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::ComponentMapping const &, VULKAN_HPP_NAMESPACE::ImageCreateFlags const &, VULKAN_HPP_NAMESPACE::ImageType const &, VULKAN_HPP_NAMESPACE::ImageTiling const &, VULKAN_HPP_NAMESPACE::ImageUsageFlags const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, format, componentMapping, imageCreateFlags, imageType, imageTiling, imageUsageFlags );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoFormatPropertiesKHR const & ) const = default;
#else
bool operator==( VideoFormatPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( format == rhs.format )
&& ( componentMapping == rhs.componentMapping )
&& ( imageCreateFlags == rhs.imageCreateFlags )
&& ( imageType == rhs.imageType )
&& ( imageTiling == rhs.imageTiling )
&& ( imageUsageFlags == rhs.imageUsageFlags );
#endif
}
bool operator!=( VideoFormatPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoFormatPropertiesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
VULKAN_HPP_NAMESPACE::ComponentMapping componentMapping = {};
VULKAN_HPP_NAMESPACE::ImageCreateFlags imageCreateFlags = {};
VULKAN_HPP_NAMESPACE::ImageType imageType = VULKAN_HPP_NAMESPACE::ImageType::e1D;
VULKAN_HPP_NAMESPACE::ImageTiling imageTiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal;
VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsageFlags = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoFormatPropertiesKHR>
{
using Type = VideoFormatPropertiesKHR;
};
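  // Usage sketch (illustrative, assuming `physicalDevice`, a filled
  // PhysicalDeviceVideoFormatInfoKHR named `videoFormatInfo`, and the default
  // exceptions-enabled configuration): each returned element describes one
  // image format usable with the requested video profiles.
  //
  //   for ( auto const & formatProperties : physicalDevice.getVideoFormatPropertiesKHR( videoFormatInfo ) )
  //   {
  //     // formatProperties.format and formatProperties.imageUsageFlags describe one supported combination
  //   }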
// wrapper struct for struct VkVideoFormatQuantizationMapPropertiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoFormatQuantizationMapPropertiesKHR.html
struct VideoFormatQuantizationMapPropertiesKHR
{
using NativeType = VkVideoFormatQuantizationMapPropertiesKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoFormatQuantizationMapPropertiesKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoFormatQuantizationMapPropertiesKHR(VULKAN_HPP_NAMESPACE::Extent2D quantizationMapTexelSize_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, quantizationMapTexelSize{ quantizationMapTexelSize_ }
{}
VULKAN_HPP_CONSTEXPR VideoFormatQuantizationMapPropertiesKHR( VideoFormatQuantizationMapPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoFormatQuantizationMapPropertiesKHR( VkVideoFormatQuantizationMapPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoFormatQuantizationMapPropertiesKHR( *reinterpret_cast<VideoFormatQuantizationMapPropertiesKHR const *>( &rhs ) )
{}
VideoFormatQuantizationMapPropertiesKHR & operator=( VideoFormatQuantizationMapPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoFormatQuantizationMapPropertiesKHR & operator=( VkVideoFormatQuantizationMapPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoFormatQuantizationMapPropertiesKHR const *>( &rhs );
return *this;
}
operator VkVideoFormatQuantizationMapPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoFormatQuantizationMapPropertiesKHR*>( this );
}
operator VkVideoFormatQuantizationMapPropertiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoFormatQuantizationMapPropertiesKHR*>( this );
}
operator VkVideoFormatQuantizationMapPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoFormatQuantizationMapPropertiesKHR*>( this );
}
operator VkVideoFormatQuantizationMapPropertiesKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoFormatQuantizationMapPropertiesKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Extent2D const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, quantizationMapTexelSize );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoFormatQuantizationMapPropertiesKHR const & ) const = default;
#else
bool operator==( VideoFormatQuantizationMapPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( quantizationMapTexelSize == rhs.quantizationMapTexelSize );
#endif
}
bool operator!=( VideoFormatQuantizationMapPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoFormatQuantizationMapPropertiesKHR;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Extent2D quantizationMapTexelSize = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoFormatQuantizationMapPropertiesKHR>
{
using Type = VideoFormatQuantizationMapPropertiesKHR;
};
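  // Usage sketch (illustrative): an output structure reporting the texel size a
  // quantization map format uses; the value read here is what the quantization
  // map session parameters create info above expects.
  //
  //   VULKAN_HPP_NAMESPACE::VideoFormatQuantizationMapPropertiesKHR mapProperties{};
  //   VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR formatProperties{};
  //   formatProperties.pNext = &mapProperties;
  //   // ... query, then use mapProperties.quantizationMapTexelSize ...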
// wrapper struct for struct VkVideoInlineQueryInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoInlineQueryInfoKHR.html
struct VideoInlineQueryInfoKHR
{
using NativeType = VkVideoInlineQueryInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoInlineQueryInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoInlineQueryInfoKHR(VULKAN_HPP_NAMESPACE::QueryPool queryPool_ = {}, uint32_t firstQuery_ = {}, uint32_t queryCount_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, queryPool{ queryPool_ }, firstQuery{ firstQuery_ }, queryCount{ queryCount_ }
{}
VULKAN_HPP_CONSTEXPR VideoInlineQueryInfoKHR( VideoInlineQueryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoInlineQueryInfoKHR( VkVideoInlineQueryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoInlineQueryInfoKHR( *reinterpret_cast<VideoInlineQueryInfoKHR const *>( &rhs ) )
{}
VideoInlineQueryInfoKHR & operator=( VideoInlineQueryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoInlineQueryInfoKHR & operator=( VkVideoInlineQueryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoInlineQueryInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoInlineQueryInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoInlineQueryInfoKHR & setQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool_ ) VULKAN_HPP_NOEXCEPT
{
queryPool = queryPool_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoInlineQueryInfoKHR & setFirstQuery( uint32_t firstQuery_ ) VULKAN_HPP_NOEXCEPT
{
firstQuery = firstQuery_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoInlineQueryInfoKHR & setQueryCount( uint32_t queryCount_ ) VULKAN_HPP_NOEXCEPT
{
queryCount = queryCount_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoInlineQueryInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoInlineQueryInfoKHR*>( this );
}
operator VkVideoInlineQueryInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoInlineQueryInfoKHR*>( this );
}
operator VkVideoInlineQueryInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoInlineQueryInfoKHR*>( this );
}
operator VkVideoInlineQueryInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoInlineQueryInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::QueryPool const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, queryPool, firstQuery, queryCount );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoInlineQueryInfoKHR const & ) const = default;
#else
bool operator==( VideoInlineQueryInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( queryPool == rhs.queryPool )
&& ( firstQuery == rhs.firstQuery )
&& ( queryCount == rhs.queryCount );
#endif
}
bool operator!=( VideoInlineQueryInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoInlineQueryInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::QueryPool queryPool = {};
uint32_t firstQuery = {};
uint32_t queryCount = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoInlineQueryInfoKHR>
{
using Type = VideoInlineQueryInfoKHR;
};
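  // Usage sketch (illustrative, assuming `queryPool` is a valid
  // VULKAN_HPP_NAMESPACE::QueryPool): requests a single inline query at slot 0
  // for the video coding operation this structure is chained into.
  //
  //   VULKAN_HPP_NAMESPACE::VideoInlineQueryInfoKHR inlineQueryInfo =
  //     VULKAN_HPP_NAMESPACE::VideoInlineQueryInfoKHR{}
  //       .setQueryPool( queryPool )
  //       .setFirstQuery( 0 )
  //       .setQueryCount( 1 );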
// wrapper struct for struct VkVideoProfileListInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoProfileListInfoKHR.html
struct VideoProfileListInfoKHR
{
using NativeType = VkVideoProfileListInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoProfileListInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoProfileListInfoKHR(uint32_t profileCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pProfiles_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, profileCount{ profileCount_ }, pProfiles{ pProfiles_ }
{}
VULKAN_HPP_CONSTEXPR VideoProfileListInfoKHR( VideoProfileListInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoProfileListInfoKHR( VkVideoProfileListInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoProfileListInfoKHR( *reinterpret_cast<VideoProfileListInfoKHR const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
VideoProfileListInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR> const & profiles_, const void * pNext_ = nullptr )
: pNext( pNext_ ), profileCount( static_cast<uint32_t>( profiles_.size() ) ), pProfiles( profiles_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VideoProfileListInfoKHR & operator=( VideoProfileListInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoProfileListInfoKHR & operator=( VkVideoProfileListInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoProfileListInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoProfileListInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoProfileListInfoKHR & setProfileCount( uint32_t profileCount_ ) VULKAN_HPP_NOEXCEPT
{
profileCount = profileCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoProfileListInfoKHR & setPProfiles( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pProfiles_ ) VULKAN_HPP_NOEXCEPT
{
pProfiles = pProfiles_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
VideoProfileListInfoKHR & setProfiles( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR> const & profiles_ ) VULKAN_HPP_NOEXCEPT
{
profileCount = static_cast<uint32_t>( profiles_.size() );
pProfiles = profiles_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoProfileListInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoProfileListInfoKHR*>( this );
}
operator VkVideoProfileListInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoProfileListInfoKHR*>( this );
}
operator VkVideoProfileListInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoProfileListInfoKHR*>( this );
}
operator VkVideoProfileListInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoProfileListInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, profileCount, pProfiles );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoProfileListInfoKHR const & ) const = default;
#else
bool operator==( VideoProfileListInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( profileCount == rhs.profileCount )
&& ( pProfiles == rhs.pProfiles );
#endif
}
bool operator!=( VideoProfileListInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoProfileListInfoKHR;
const void * pNext = {};
uint32_t profileCount = {};
const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pProfiles = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoProfileListInfoKHR>
{
using Type = VideoProfileListInfoKHR;
};
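  // Usage sketch (illustrative, enhanced mode assumed for setProfiles;
  // `profileInfo` is an assumed, filled VideoProfileInfoKHR): lists every video
  // profile a resource will be used with.
  //
  //   VULKAN_HPP_NAMESPACE::VideoProfileListInfoKHR profileListInfo =
  //     VULKAN_HPP_NAMESPACE::VideoProfileListInfoKHR{}
  //       .setProfiles( profileInfo );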
// wrapper struct for struct VkVideoSessionCreateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoSessionCreateInfoKHR.html
struct VideoSessionCreateInfoKHR
{
using NativeType = VkVideoSessionCreateInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoSessionCreateInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR(uint32_t queueFamilyIndex_ = {}, VULKAN_HPP_NAMESPACE::VideoSessionCreateFlagsKHR flags_ = {}, const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pVideoProfile_ = {}, VULKAN_HPP_NAMESPACE::Format pictureFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::Extent2D maxCodedExtent_ = {}, VULKAN_HPP_NAMESPACE::Format referencePictureFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, uint32_t maxDpbSlots_ = {}, uint32_t maxActiveReferencePictures_ = {}, const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdHeaderVersion_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, queueFamilyIndex{ queueFamilyIndex_ }, flags{ flags_ }, pVideoProfile{ pVideoProfile_ }, pictureFormat{ pictureFormat_ }, maxCodedExtent{ maxCodedExtent_ }, referencePictureFormat{ referencePictureFormat_ }, maxDpbSlots{ maxDpbSlots_ }, maxActiveReferencePictures{ maxActiveReferencePictures_ }, pStdHeaderVersion{ pStdHeaderVersion_ }
{}
VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR( VideoSessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoSessionCreateInfoKHR( VkVideoSessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoSessionCreateInfoKHR( *reinterpret_cast<VideoSessionCreateInfoKHR const *>( &rhs ) )
{}
VideoSessionCreateInfoKHR & operator=( VideoSessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoSessionCreateInfoKHR & operator=( VkVideoSessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
{
queueFamilyIndex = queueFamilyIndex_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoSessionCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setPVideoProfile( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pVideoProfile_ ) VULKAN_HPP_NOEXCEPT
{
pVideoProfile = pVideoProfile_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setPictureFormat( VULKAN_HPP_NAMESPACE::Format pictureFormat_ ) VULKAN_HPP_NOEXCEPT
{
pictureFormat = pictureFormat_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setMaxCodedExtent( VULKAN_HPP_NAMESPACE::Extent2D const & maxCodedExtent_ ) VULKAN_HPP_NOEXCEPT
{
maxCodedExtent = maxCodedExtent_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setReferencePictureFormat( VULKAN_HPP_NAMESPACE::Format referencePictureFormat_ ) VULKAN_HPP_NOEXCEPT
{
referencePictureFormat = referencePictureFormat_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setMaxDpbSlots( uint32_t maxDpbSlots_ ) VULKAN_HPP_NOEXCEPT
{
maxDpbSlots = maxDpbSlots_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setMaxActiveReferencePictures( uint32_t maxActiveReferencePictures_ ) VULKAN_HPP_NOEXCEPT
{
maxActiveReferencePictures = maxActiveReferencePictures_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setPStdHeaderVersion( const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdHeaderVersion_ ) VULKAN_HPP_NOEXCEPT
{
pStdHeaderVersion = pStdHeaderVersion_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoSessionCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoSessionCreateInfoKHR*>( this );
}
operator VkVideoSessionCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoSessionCreateInfoKHR*>( this );
}
operator VkVideoSessionCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoSessionCreateInfoKHR*>( this );
}
operator VkVideoSessionCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoSessionCreateInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
               const void * const &,
               uint32_t const &,
               VULKAN_HPP_NAMESPACE::VideoSessionCreateFlagsKHR const &,
               const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * const &,
               VULKAN_HPP_NAMESPACE::Format const &,
               VULKAN_HPP_NAMESPACE::Extent2D const &,
               VULKAN_HPP_NAMESPACE::Format const &,
               uint32_t const &,
               uint32_t const &,
               const VULKAN_HPP_NAMESPACE::ExtensionProperties * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, queueFamilyIndex, flags, pVideoProfile, pictureFormat, maxCodedExtent, referencePictureFormat, maxDpbSlots, maxActiveReferencePictures, pStdHeaderVersion );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoSessionCreateInfoKHR const & ) const = default;
#else
bool operator==( VideoSessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( queueFamilyIndex == rhs.queueFamilyIndex )
&& ( flags == rhs.flags )
&& ( pVideoProfile == rhs.pVideoProfile )
&& ( pictureFormat == rhs.pictureFormat )
&& ( maxCodedExtent == rhs.maxCodedExtent )
&& ( referencePictureFormat == rhs.referencePictureFormat )
&& ( maxDpbSlots == rhs.maxDpbSlots )
&& ( maxActiveReferencePictures == rhs.maxActiveReferencePictures )
&& ( pStdHeaderVersion == rhs.pStdHeaderVersion );
#endif
}
bool operator!=( VideoSessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoSessionCreateInfoKHR;
const void * pNext = {};
uint32_t queueFamilyIndex = {};
VULKAN_HPP_NAMESPACE::VideoSessionCreateFlagsKHR flags = {};
const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pVideoProfile = {};
VULKAN_HPP_NAMESPACE::Format pictureFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
VULKAN_HPP_NAMESPACE::Extent2D maxCodedExtent = {};
VULKAN_HPP_NAMESPACE::Format referencePictureFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
uint32_t maxDpbSlots = {};
uint32_t maxActiveReferencePictures = {};
const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdHeaderVersion = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoSessionCreateInfoKHR>
{
using Type = VideoSessionCreateInfoKHR;
};
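  // Usage sketch (comments only): populating the create info with the fluent setters above and
  // creating the session. `videoQueueFamilyIndex`, `videoProfile`, `stdHeaderVersion`, and the two
  // format values are hypothetical, obtained from the usual video capability queries.
  //
  //   auto sessionCreateInfo = VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR{}
  //                              .setQueueFamilyIndex( videoQueueFamilyIndex )
  //                              .setPVideoProfile( &videoProfile )
  //                              .setPictureFormat( pictureFormat )
  //                              .setMaxCodedExtent( { 1920, 1080 } )
  //                              .setReferencePictureFormat( referencePictureFormat )
  //                              .setMaxDpbSlots( 17 )
  //                              .setMaxActiveReferencePictures( 16 )
  //                              .setPStdHeaderVersion( &stdHeaderVersion );
  //   VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession = device.createVideoSessionKHR( sessionCreateInfo );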
// wrapper struct for struct VkVideoSessionMemoryRequirementsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoSessionMemoryRequirementsKHR.html
struct VideoSessionMemoryRequirementsKHR
{
using NativeType = VkVideoSessionMemoryRequirementsKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoSessionMemoryRequirementsKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoSessionMemoryRequirementsKHR(uint32_t memoryBindIndex_ = {}, VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, memoryBindIndex{ memoryBindIndex_ }, memoryRequirements{ memoryRequirements_ }
{}
VULKAN_HPP_CONSTEXPR VideoSessionMemoryRequirementsKHR( VideoSessionMemoryRequirementsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoSessionMemoryRequirementsKHR( VkVideoSessionMemoryRequirementsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoSessionMemoryRequirementsKHR( *reinterpret_cast<VideoSessionMemoryRequirementsKHR const *>( &rhs ) )
{}
VideoSessionMemoryRequirementsKHR & operator=( VideoSessionMemoryRequirementsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoSessionMemoryRequirementsKHR & operator=( VkVideoSessionMemoryRequirementsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR const *>( &rhs );
return *this;
}
operator VkVideoSessionMemoryRequirementsKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoSessionMemoryRequirementsKHR*>( this );
}
operator VkVideoSessionMemoryRequirementsKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoSessionMemoryRequirementsKHR*>( this );
}
operator VkVideoSessionMemoryRequirementsKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoSessionMemoryRequirementsKHR*>( this );
}
operator VkVideoSessionMemoryRequirementsKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoSessionMemoryRequirementsKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::MemoryRequirements const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, memoryBindIndex, memoryRequirements );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoSessionMemoryRequirementsKHR const & ) const = default;
#else
bool operator==( VideoSessionMemoryRequirementsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( memoryBindIndex == rhs.memoryBindIndex )
&& ( memoryRequirements == rhs.memoryRequirements );
#endif
}
bool operator!=( VideoSessionMemoryRequirementsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoSessionMemoryRequirementsKHR;
void * pNext = {};
uint32_t memoryBindIndex = {};
VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoSessionMemoryRequirementsKHR>
{
using Type = VideoSessionMemoryRequirementsKHR;
};
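  // Usage sketch (comments only): this is an output structure (note the non-const pNext and the
  // absence of setters). Each element reported by the implementation names a memory bind index and
  // the requirements for it; `device` and `videoSession` are hypothetical, and the vector return
  // assumes the enhanced-mode wrapper.
  //
  //   std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR> requirements =
  //     device.getVideoSessionMemoryRequirementsKHR( videoSession );
  //   for ( auto const & req : requirements )
  //   {
  //     // allocate memory satisfying req.memoryRequirements, then bind it at req.memoryBindIndex
  //   }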
// wrapper struct for struct VkVideoSessionParametersCreateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoSessionParametersCreateInfoKHR.html
struct VideoSessionParametersCreateInfoKHR
{
using NativeType = VkVideoSessionParametersCreateInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoSessionParametersCreateInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoSessionParametersCreateInfoKHR(VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateFlagsKHR flags_ = {}, VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParametersTemplate_ = {}, VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, videoSessionParametersTemplate{ videoSessionParametersTemplate_ }, videoSession{ videoSession_ }
{}
VULKAN_HPP_CONSTEXPR VideoSessionParametersCreateInfoKHR( VideoSessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoSessionParametersCreateInfoKHR( VkVideoSessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoSessionParametersCreateInfoKHR( *reinterpret_cast<VideoSessionParametersCreateInfoKHR const *>( &rhs ) )
{}
VideoSessionParametersCreateInfoKHR & operator=( VideoSessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoSessionParametersCreateInfoKHR & operator=( VkVideoSessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoSessionParametersCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoSessionParametersCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoSessionParametersCreateInfoKHR & setVideoSessionParametersTemplate( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParametersTemplate_ ) VULKAN_HPP_NOEXCEPT
{
videoSessionParametersTemplate = videoSessionParametersTemplate_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoSessionParametersCreateInfoKHR & setVideoSession( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_ ) VULKAN_HPP_NOEXCEPT
{
videoSession = videoSession_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoSessionParametersCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR*>( this );
}
operator VkVideoSessionParametersCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoSessionParametersCreateInfoKHR*>( this );
}
operator VkVideoSessionParametersCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR*>( this );
}
operator VkVideoSessionParametersCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoSessionParametersCreateInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateFlagsKHR const &, VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR const &, VULKAN_HPP_NAMESPACE::VideoSessionKHR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, videoSessionParametersTemplate, videoSession );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoSessionParametersCreateInfoKHR const & ) const = default;
#else
bool operator==( VideoSessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( videoSessionParametersTemplate == rhs.videoSessionParametersTemplate )
&& ( videoSession == rhs.videoSession );
#endif
}
bool operator!=( VideoSessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoSessionParametersCreateInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateFlagsKHR flags = {};
VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParametersTemplate = {};
VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoSessionParametersCreateInfoKHR>
{
using Type = VideoSessionParametersCreateInfoKHR;
};
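  // Usage sketch (comments only): the codec-specific parameter sets (SPS/PPS and the like) travel
  // in the pNext chain; this struct itself only names the session, an optional template object to
  // inherit parameters from, and flags. `h264ParametersAddInfo` stands in for a hypothetical
  // codec-specific chain entry.
  //
  //   auto parametersCreateInfo = VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR{}
  //                                 .setVideoSession( videoSession )
  //                                 .setPNext( &h264ParametersAddInfo );
  //   auto videoSessionParameters = device.createVideoSessionParametersKHR( parametersCreateInfo );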
// wrapper struct for struct VkVideoSessionParametersUpdateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoSessionParametersUpdateInfoKHR.html
struct VideoSessionParametersUpdateInfoKHR
{
using NativeType = VkVideoSessionParametersUpdateInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoSessionParametersUpdateInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoSessionParametersUpdateInfoKHR(uint32_t updateSequenceCount_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, updateSequenceCount{ updateSequenceCount_ }
{}
VULKAN_HPP_CONSTEXPR VideoSessionParametersUpdateInfoKHR( VideoSessionParametersUpdateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoSessionParametersUpdateInfoKHR( VkVideoSessionParametersUpdateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoSessionParametersUpdateInfoKHR( *reinterpret_cast<VideoSessionParametersUpdateInfoKHR const *>( &rhs ) )
{}
VideoSessionParametersUpdateInfoKHR & operator=( VideoSessionParametersUpdateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
VideoSessionParametersUpdateInfoKHR & operator=( VkVideoSessionParametersUpdateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 VideoSessionParametersUpdateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoSessionParametersUpdateInfoKHR & setUpdateSequenceCount( uint32_t updateSequenceCount_ ) VULKAN_HPP_NOEXCEPT
{
updateSequenceCount = updateSequenceCount_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkVideoSessionParametersUpdateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoSessionParametersUpdateInfoKHR*>( this );
}
operator VkVideoSessionParametersUpdateInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoSessionParametersUpdateInfoKHR*>( this );
}
operator VkVideoSessionParametersUpdateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkVideoSessionParametersUpdateInfoKHR*>( this );
}
operator VkVideoSessionParametersUpdateInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkVideoSessionParametersUpdateInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, updateSequenceCount );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( VideoSessionParametersUpdateInfoKHR const & ) const = default;
#else
bool operator==( VideoSessionParametersUpdateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( updateSequenceCount == rhs.updateSequenceCount );
#endif
}
bool operator!=( VideoSessionParametersUpdateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoSessionParametersUpdateInfoKHR;
const void * pNext = {};
uint32_t updateSequenceCount = {};
};
template <>
struct CppType<StructureType, StructureType::eVideoSessionParametersUpdateInfoKHR>
{
using Type = VideoSessionParametersUpdateInfoKHR;
};
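  // Usage sketch (comments only): updateSequenceCount must increase by one with each update applied
  // to a parameters object; the codec-specific data to add again travels in pNext. The locals below
  // are hypothetical.
  //
  //   auto updateInfo = VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR{}
  //                       .setUpdateSequenceCount( ++currentUpdateSequenceCount )
  //                       .setPNext( &h264ParametersAddInfo );
  //   device.updateVideoSessionParametersKHR( videoSessionParameters, updateInfo );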
#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
// wrapper struct for struct VkWaylandSurfaceCreateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkWaylandSurfaceCreateInfoKHR.html
struct WaylandSurfaceCreateInfoKHR
{
using NativeType = VkWaylandSurfaceCreateInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWaylandSurfaceCreateInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR WaylandSurfaceCreateInfoKHR(VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags_ = {}, struct wl_display * display_ = {}, struct wl_surface * surface_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, display{ display_ }, surface{ surface_ }
{}
VULKAN_HPP_CONSTEXPR WaylandSurfaceCreateInfoKHR( WaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
WaylandSurfaceCreateInfoKHR( VkWaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: WaylandSurfaceCreateInfoKHR( *reinterpret_cast<WaylandSurfaceCreateInfoKHR const *>( &rhs ) )
{}
WaylandSurfaceCreateInfoKHR & operator=( WaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
WaylandSurfaceCreateInfoKHR & operator=( VkWaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 WaylandSurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 WaylandSurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 WaylandSurfaceCreateInfoKHR & setDisplay( struct wl_display * display_ ) VULKAN_HPP_NOEXCEPT
{
display = display_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 WaylandSurfaceCreateInfoKHR & setSurface( struct wl_surface * surface_ ) VULKAN_HPP_NOEXCEPT
{
surface = surface_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkWaylandSurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR*>( this );
}
operator VkWaylandSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkWaylandSurfaceCreateInfoKHR*>( this );
}
operator VkWaylandSurfaceCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR*>( this );
}
operator VkWaylandSurfaceCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkWaylandSurfaceCreateInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR const &, struct wl_display * const &, struct wl_surface * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, display, surface );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( WaylandSurfaceCreateInfoKHR const & ) const = default;
#else
bool operator==( WaylandSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( display == rhs.display )
&& ( surface == rhs.surface );
#endif
}
bool operator!=( WaylandSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWaylandSurfaceCreateInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags = {};
struct wl_display * display = {};
struct wl_surface * surface = {};
};
template <>
struct CppType<StructureType, StructureType::eWaylandSurfaceCreateInfoKHR>
{
using Type = WaylandSurfaceCreateInfoKHR;
};
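  // Usage sketch (comments only): creating a Vulkan surface from a Wayland display/surface pair;
  // `wlDisplay` and `wlSurface` are hypothetical handles obtained through the Wayland client API.
  //
  //   auto surfaceCreateInfo = VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR{}
  //                              .setDisplay( wlDisplay )
  //                              .setSurface( wlSurface );
  //   VULKAN_HPP_NAMESPACE::SurfaceKHR surface = instance.createWaylandSurfaceKHR( surfaceCreateInfo );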
#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
#if defined( VK_USE_PLATFORM_WIN32_KHR )
// wrapper struct for struct VkWin32KeyedMutexAcquireReleaseInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkWin32KeyedMutexAcquireReleaseInfoKHR.html
struct Win32KeyedMutexAcquireReleaseInfoKHR
{
using NativeType = VkWin32KeyedMutexAcquireReleaseInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoKHR( uint32_t                                   acquireCount_     = {},
                                                               const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs_    = {},
                                                               const uint64_t *                           pAcquireKeys_     = {},
                                                               const uint32_t *                           pAcquireTimeouts_ = {},
                                                               uint32_t                                   releaseCount_     = {},
                                                               const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs_    = {},
                                                               const uint64_t *                           pReleaseKeys_     = {},
                                                               const void *                               pNext_            = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , acquireCount{ acquireCount_ }
      , pAcquireSyncs{ pAcquireSyncs_ }
      , pAcquireKeys{ pAcquireKeys_ }
      , pAcquireTimeouts{ pAcquireTimeouts_ }
      , releaseCount{ releaseCount_ }
      , pReleaseSyncs{ pReleaseSyncs_ }
      , pReleaseKeys{ pReleaseKeys_ }
{}
VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoKHR( Win32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
Win32KeyedMutexAcquireReleaseInfoKHR( VkWin32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: Win32KeyedMutexAcquireReleaseInfoKHR( *reinterpret_cast<Win32KeyedMutexAcquireReleaseInfoKHR const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    Win32KeyedMutexAcquireReleaseInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & acquireSyncs_,
                                          VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const &                           acquireKeys_     = {},
                                          VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const &                           acquireTimeouts_ = {},
                                          VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & releaseSyncs_    = {},
                                          VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const &                           releaseKeys_     = {},
                                          const void *                                                                                    pNext_           = nullptr )
      : pNext( pNext_ )
      , acquireCount( static_cast<uint32_t>( acquireSyncs_.size() ) )
      , pAcquireSyncs( acquireSyncs_.data() )
      , pAcquireKeys( acquireKeys_.data() )
      , pAcquireTimeouts( acquireTimeouts_.data() )
      , releaseCount( static_cast<uint32_t>( releaseSyncs_.size() ) )
      , pReleaseSyncs( releaseSyncs_.data() )
      , pReleaseKeys( releaseKeys_.data() )
{
#ifdef VULKAN_HPP_NO_EXCEPTIONS
VULKAN_HPP_ASSERT( acquireSyncs_.size() == acquireKeys_.size() );
VULKAN_HPP_ASSERT( acquireSyncs_.size() == acquireTimeouts_.size() );
VULKAN_HPP_ASSERT( acquireKeys_.size() == acquireTimeouts_.size() );
#else
if ( acquireSyncs_.size() != acquireKeys_.size() )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING"::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: acquireSyncs_.size() != acquireKeys_.size()" );
}
if ( acquireSyncs_.size() != acquireTimeouts_.size() )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING"::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: acquireSyncs_.size() != acquireTimeouts_.size()" );
}
if ( acquireKeys_.size() != acquireTimeouts_.size() )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING"::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: acquireKeys_.size() != acquireTimeouts_.size()" );
}
#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
#ifdef VULKAN_HPP_NO_EXCEPTIONS
VULKAN_HPP_ASSERT( releaseSyncs_.size() == releaseKeys_.size() );
#else
if ( releaseSyncs_.size() != releaseKeys_.size() )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING"::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: releaseSyncs_.size() != releaseKeys_.size()" );
}
#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
Win32KeyedMutexAcquireReleaseInfoKHR & operator=( Win32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
Win32KeyedMutexAcquireReleaseInfoKHR & operator=( VkWin32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & setAcquireCount( uint32_t acquireCount_ ) VULKAN_HPP_NOEXCEPT
{
acquireCount = acquireCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & setPAcquireSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs_ ) VULKAN_HPP_NOEXCEPT
{
pAcquireSyncs = pAcquireSyncs_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
Win32KeyedMutexAcquireReleaseInfoKHR & setAcquireSyncs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & acquireSyncs_ ) VULKAN_HPP_NOEXCEPT
{
acquireCount = static_cast<uint32_t>( acquireSyncs_.size() );
pAcquireSyncs = acquireSyncs_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & setPAcquireKeys( const uint64_t * pAcquireKeys_ ) VULKAN_HPP_NOEXCEPT
{
pAcquireKeys = pAcquireKeys_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
Win32KeyedMutexAcquireReleaseInfoKHR & setAcquireKeys( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & acquireKeys_ ) VULKAN_HPP_NOEXCEPT
{
acquireCount = static_cast<uint32_t>( acquireKeys_.size() );
pAcquireKeys = acquireKeys_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & setPAcquireTimeouts( const uint32_t * pAcquireTimeouts_ ) VULKAN_HPP_NOEXCEPT
{
pAcquireTimeouts = pAcquireTimeouts_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
Win32KeyedMutexAcquireReleaseInfoKHR & setAcquireTimeouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & acquireTimeouts_ ) VULKAN_HPP_NOEXCEPT
{
acquireCount = static_cast<uint32_t>( acquireTimeouts_.size() );
pAcquireTimeouts = acquireTimeouts_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & setReleaseCount( uint32_t releaseCount_ ) VULKAN_HPP_NOEXCEPT
{
releaseCount = releaseCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & setPReleaseSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs_ ) VULKAN_HPP_NOEXCEPT
{
pReleaseSyncs = pReleaseSyncs_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
Win32KeyedMutexAcquireReleaseInfoKHR & setReleaseSyncs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & releaseSyncs_ ) VULKAN_HPP_NOEXCEPT
{
releaseCount = static_cast<uint32_t>( releaseSyncs_.size() );
pReleaseSyncs = releaseSyncs_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & setPReleaseKeys( const uint64_t * pReleaseKeys_ ) VULKAN_HPP_NOEXCEPT
{
pReleaseKeys = pReleaseKeys_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
Win32KeyedMutexAcquireReleaseInfoKHR & setReleaseKeys( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & releaseKeys_ ) VULKAN_HPP_NOEXCEPT
{
releaseCount = static_cast<uint32_t>( releaseKeys_.size() );
pReleaseKeys = releaseKeys_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkWin32KeyedMutexAcquireReleaseInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkWin32KeyedMutexAcquireReleaseInfoKHR*>( this );
}
operator VkWin32KeyedMutexAcquireReleaseInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkWin32KeyedMutexAcquireReleaseInfoKHR*>( this );
}
operator VkWin32KeyedMutexAcquireReleaseInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkWin32KeyedMutexAcquireReleaseInfoKHR*>( this );
}
operator VkWin32KeyedMutexAcquireReleaseInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkWin32KeyedMutexAcquireReleaseInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
               const void * const &,
               uint32_t const &,
               const VULKAN_HPP_NAMESPACE::DeviceMemory * const &,
               const uint64_t * const &,
               const uint32_t * const &,
               uint32_t const &,
               const VULKAN_HPP_NAMESPACE::DeviceMemory * const &,
               const uint64_t * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, acquireCount, pAcquireSyncs, pAcquireKeys, pAcquireTimeouts, releaseCount, pReleaseSyncs, pReleaseKeys );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( Win32KeyedMutexAcquireReleaseInfoKHR const & ) const = default;
#else
bool operator==( Win32KeyedMutexAcquireReleaseInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( acquireCount == rhs.acquireCount )
&& ( pAcquireSyncs == rhs.pAcquireSyncs )
&& ( pAcquireKeys == rhs.pAcquireKeys )
&& ( pAcquireTimeouts == rhs.pAcquireTimeouts )
&& ( releaseCount == rhs.releaseCount )
&& ( pReleaseSyncs == rhs.pReleaseSyncs )
&& ( pReleaseKeys == rhs.pReleaseKeys );
#endif
}
bool operator!=( Win32KeyedMutexAcquireReleaseInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR;
const void * pNext = {};
uint32_t acquireCount = {};
const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs = {};
const uint64_t * pAcquireKeys = {};
const uint32_t * pAcquireTimeouts = {};
uint32_t releaseCount = {};
const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs = {};
const uint64_t * pReleaseKeys = {};
};
template <>
struct CppType<StructureType, StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR>
{
using Type = Win32KeyedMutexAcquireReleaseInfoKHR;
};
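  // Usage sketch (comments only): the array-proxy constructor above keeps the three acquire arrays
  // and the two release arrays in sync (it asserts or throws on mismatched sizes); the result is
  // then chained into a queue submission. `memory`, `key`, `timeout`, and `submitInfo` are
  // hypothetical single-element values (the array proxies accept single lvalues).
  //
  //   VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoKHR keyedMutexInfo( memory,    // acquire syncs
  //                                                                              key,       // acquire keys
  //                                                                              timeout,   // acquire timeouts
  //                                                                              memory,    // release syncs
  //                                                                              key );     // release keys
  //   submitInfo.setPNext( &keyedMutexInfo );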
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#if defined( VK_USE_PLATFORM_WIN32_KHR )
// wrapper struct for struct VkWin32KeyedMutexAcquireReleaseInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkWin32KeyedMutexAcquireReleaseInfoNV.html
struct Win32KeyedMutexAcquireReleaseInfoNV
{
using NativeType = VkWin32KeyedMutexAcquireReleaseInfoNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWin32KeyedMutexAcquireReleaseInfoNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoNV( uint32_t                                   acquireCount_                = {},
                                                              const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs_               = {},
                                                              const uint64_t *                           pAcquireKeys_                = {},
                                                              const uint32_t *                           pAcquireTimeoutMilliseconds_ = {},
                                                              uint32_t                                   releaseCount_                = {},
                                                              const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs_               = {},
                                                              const uint64_t *                           pReleaseKeys_                = {},
                                                              const void *                               pNext_                       = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , acquireCount{ acquireCount_ }
      , pAcquireSyncs{ pAcquireSyncs_ }
      , pAcquireKeys{ pAcquireKeys_ }
      , pAcquireTimeoutMilliseconds{ pAcquireTimeoutMilliseconds_ }
      , releaseCount{ releaseCount_ }
      , pReleaseSyncs{ pReleaseSyncs_ }
      , pReleaseKeys{ pReleaseKeys_ }
{}
VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoNV( Win32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
Win32KeyedMutexAcquireReleaseInfoNV( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: Win32KeyedMutexAcquireReleaseInfoNV( *reinterpret_cast<Win32KeyedMutexAcquireReleaseInfoNV const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    Win32KeyedMutexAcquireReleaseInfoNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & acquireSyncs_,
                                         VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const &                           acquireKeys_                = {},
                                         VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const &                           acquireTimeoutMilliseconds_ = {},
                                         VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & releaseSyncs_               = {},
                                         VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const &                           releaseKeys_                = {},
                                         const void *                                                                                    pNext_                      = nullptr )
      : pNext( pNext_ )
      , acquireCount( static_cast<uint32_t>( acquireSyncs_.size() ) )
      , pAcquireSyncs( acquireSyncs_.data() )
      , pAcquireKeys( acquireKeys_.data() )
      , pAcquireTimeoutMilliseconds( acquireTimeoutMilliseconds_.data() )
      , releaseCount( static_cast<uint32_t>( releaseSyncs_.size() ) )
      , pReleaseSyncs( releaseSyncs_.data() )
      , pReleaseKeys( releaseKeys_.data() )
{
#ifdef VULKAN_HPP_NO_EXCEPTIONS
VULKAN_HPP_ASSERT( acquireSyncs_.size() == acquireKeys_.size() );
VULKAN_HPP_ASSERT( acquireSyncs_.size() == acquireTimeoutMilliseconds_.size() );
VULKAN_HPP_ASSERT( acquireKeys_.size() == acquireTimeoutMilliseconds_.size() );
#else
if ( acquireSyncs_.size() != acquireKeys_.size() )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING"::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: acquireSyncs_.size() != acquireKeys_.size()" );
}
if ( acquireSyncs_.size() != acquireTimeoutMilliseconds_.size() )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING"::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: acquireSyncs_.size() != acquireTimeoutMilliseconds_.size()" );
}
if ( acquireKeys_.size() != acquireTimeoutMilliseconds_.size() )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING"::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: acquireKeys_.size() != acquireTimeoutMilliseconds_.size()" );
}
#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
#ifdef VULKAN_HPP_NO_EXCEPTIONS
VULKAN_HPP_ASSERT( releaseSyncs_.size() == releaseKeys_.size() );
#else
if ( releaseSyncs_.size() != releaseKeys_.size() )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING"::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: releaseSyncs_.size() != releaseKeys_.size()" );
}
#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
Win32KeyedMutexAcquireReleaseInfoNV & operator=( Win32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
Win32KeyedMutexAcquireReleaseInfoNV & operator=( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & setAcquireCount( uint32_t acquireCount_ ) VULKAN_HPP_NOEXCEPT
{
acquireCount = acquireCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & setPAcquireSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs_ ) VULKAN_HPP_NOEXCEPT
{
pAcquireSyncs = pAcquireSyncs_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
Win32KeyedMutexAcquireReleaseInfoNV & setAcquireSyncs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & acquireSyncs_ ) VULKAN_HPP_NOEXCEPT
{
acquireCount = static_cast<uint32_t>( acquireSyncs_.size() );
pAcquireSyncs = acquireSyncs_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & setPAcquireKeys( const uint64_t * pAcquireKeys_ ) VULKAN_HPP_NOEXCEPT
{
pAcquireKeys = pAcquireKeys_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
Win32KeyedMutexAcquireReleaseInfoNV & setAcquireKeys( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & acquireKeys_ ) VULKAN_HPP_NOEXCEPT
{
acquireCount = static_cast<uint32_t>( acquireKeys_.size() );
pAcquireKeys = acquireKeys_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & setPAcquireTimeoutMilliseconds( const uint32_t * pAcquireTimeoutMilliseconds_ ) VULKAN_HPP_NOEXCEPT
{
pAcquireTimeoutMilliseconds = pAcquireTimeoutMilliseconds_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
Win32KeyedMutexAcquireReleaseInfoNV & setAcquireTimeoutMilliseconds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & acquireTimeoutMilliseconds_ ) VULKAN_HPP_NOEXCEPT
{
acquireCount = static_cast<uint32_t>( acquireTimeoutMilliseconds_.size() );
pAcquireTimeoutMilliseconds = acquireTimeoutMilliseconds_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & setReleaseCount( uint32_t releaseCount_ ) VULKAN_HPP_NOEXCEPT
{
releaseCount = releaseCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & setPReleaseSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs_ ) VULKAN_HPP_NOEXCEPT
{
pReleaseSyncs = pReleaseSyncs_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
Win32KeyedMutexAcquireReleaseInfoNV & setReleaseSyncs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & releaseSyncs_ ) VULKAN_HPP_NOEXCEPT
{
releaseCount = static_cast<uint32_t>( releaseSyncs_.size() );
pReleaseSyncs = releaseSyncs_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & setPReleaseKeys( const uint64_t * pReleaseKeys_ ) VULKAN_HPP_NOEXCEPT
{
pReleaseKeys = pReleaseKeys_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
Win32KeyedMutexAcquireReleaseInfoNV & setReleaseKeys( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & releaseKeys_ ) VULKAN_HPP_NOEXCEPT
{
releaseCount = static_cast<uint32_t>( releaseKeys_.size() );
pReleaseKeys = releaseKeys_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkWin32KeyedMutexAcquireReleaseInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkWin32KeyedMutexAcquireReleaseInfoNV*>( this );
}
operator VkWin32KeyedMutexAcquireReleaseInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkWin32KeyedMutexAcquireReleaseInfoNV*>( this );
}
operator VkWin32KeyedMutexAcquireReleaseInfoNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkWin32KeyedMutexAcquireReleaseInfoNV*>( this );
}
operator VkWin32KeyedMutexAcquireReleaseInfoNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkWin32KeyedMutexAcquireReleaseInfoNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
               const void * const &,
               uint32_t const &,
               const VULKAN_HPP_NAMESPACE::DeviceMemory * const &,
               const uint64_t * const &,
               const uint32_t * const &,
               uint32_t const &,
               const VULKAN_HPP_NAMESPACE::DeviceMemory * const &,
               const uint64_t * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, acquireCount, pAcquireSyncs, pAcquireKeys, pAcquireTimeoutMilliseconds, releaseCount, pReleaseSyncs, pReleaseKeys );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( Win32KeyedMutexAcquireReleaseInfoNV const & ) const = default;
#else
bool operator==( Win32KeyedMutexAcquireReleaseInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( acquireCount == rhs.acquireCount )
&& ( pAcquireSyncs == rhs.pAcquireSyncs )
&& ( pAcquireKeys == rhs.pAcquireKeys )
&& ( pAcquireTimeoutMilliseconds == rhs.pAcquireTimeoutMilliseconds )
&& ( releaseCount == rhs.releaseCount )
&& ( pReleaseSyncs == rhs.pReleaseSyncs )
&& ( pReleaseKeys == rhs.pReleaseKeys );
#endif
}
bool operator!=( Win32KeyedMutexAcquireReleaseInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWin32KeyedMutexAcquireReleaseInfoNV;
const void * pNext = {};
uint32_t acquireCount = {};
const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs = {};
const uint64_t * pAcquireKeys = {};
const uint32_t * pAcquireTimeoutMilliseconds = {};
uint32_t releaseCount = {};
const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs = {};
const uint64_t * pReleaseKeys = {};
};
template <>
struct CppType<StructureType, StructureType::eWin32KeyedMutexAcquireReleaseInfoNV>
{
using Type = Win32KeyedMutexAcquireReleaseInfoNV;
};
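  // Usage sketch (comments only): identical in shape to the KHR variant above, except the timeouts
  // are explicitly named in milliseconds; it likewise extends a queue submission for
  // NV_win32_keyed_mutex. The array-proxy setters shown below each rewrite acquireCount, so the
  // hypothetical arrays `acquireSyncs`, `acquireKeys`, and `acquireTimeoutsMs` must be equal-sized.
  //
  //   auto keyedMutexInfoNV = VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV{}
  //                             .setAcquireSyncs( acquireSyncs )
  //                             .setAcquireKeys( acquireKeys )
  //                             .setAcquireTimeoutMilliseconds( acquireTimeoutsMs );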
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#if defined( VK_USE_PLATFORM_WIN32_KHR )
// wrapper struct for struct VkWin32SurfaceCreateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkWin32SurfaceCreateInfoKHR.html
struct Win32SurfaceCreateInfoKHR
{
using NativeType = VkWin32SurfaceCreateInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWin32SurfaceCreateInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR Win32SurfaceCreateInfoKHR(VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR flags_ = {}, HINSTANCE hinstance_ = {}, HWND hwnd_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, hinstance{ hinstance_ }, hwnd{ hwnd_ }
{}
VULKAN_HPP_CONSTEXPR Win32SurfaceCreateInfoKHR( Win32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
Win32SurfaceCreateInfoKHR( VkWin32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: Win32SurfaceCreateInfoKHR( *reinterpret_cast<Win32SurfaceCreateInfoKHR const *>( &rhs ) )
{}
Win32SurfaceCreateInfoKHR & operator=( Win32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
Win32SurfaceCreateInfoKHR & operator=( VkWin32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 Win32SurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 Win32SurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 Win32SurfaceCreateInfoKHR & setHinstance( HINSTANCE hinstance_ ) VULKAN_HPP_NOEXCEPT
{
hinstance = hinstance_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 Win32SurfaceCreateInfoKHR & setHwnd( HWND hwnd_ ) VULKAN_HPP_NOEXCEPT
{
hwnd = hwnd_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkWin32SurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkWin32SurfaceCreateInfoKHR*>( this );
}
operator VkWin32SurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkWin32SurfaceCreateInfoKHR*>( this );
}
operator VkWin32SurfaceCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkWin32SurfaceCreateInfoKHR*>( this );
}
operator VkWin32SurfaceCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkWin32SurfaceCreateInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR const &, HINSTANCE const &, HWND const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, hinstance, hwnd );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( Win32SurfaceCreateInfoKHR const & ) const = default;
#else
bool operator==( Win32SurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( hinstance == rhs.hinstance )
&& ( hwnd == rhs.hwnd );
#endif
}
bool operator!=( Win32SurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWin32SurfaceCreateInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR flags = {};
HINSTANCE hinstance = {};
HWND hwnd = {};
};
template <>
struct CppType<StructureType, StructureType::eWin32SurfaceCreateInfoKHR>
{
using Type = Win32SurfaceCreateInfoKHR;
};
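  // Usage sketch (comments only): creating a surface for a Win32 window; `hwnd` is a hypothetical
  // window handle and the HINSTANCE comes from the current module.
  //
  //   auto surfaceCreateInfo = VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR{}
  //                              .setHinstance( GetModuleHandle( nullptr ) )
  //                              .setHwnd( hwnd );
  //   VULKAN_HPP_NAMESPACE::SurfaceKHR surface = instance.createWin32SurfaceKHR( surfaceCreateInfo );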
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
// wrapper struct for struct VkWriteDescriptorSetAccelerationStructureKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkWriteDescriptorSetAccelerationStructureKHR.html
struct WriteDescriptorSetAccelerationStructureKHR
{
using NativeType = VkWriteDescriptorSetAccelerationStructureKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSetAccelerationStructureKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureKHR(uint32_t accelerationStructureCount_ = {}, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, accelerationStructureCount{ accelerationStructureCount_ }, pAccelerationStructures{ pAccelerationStructures_ }
{}
VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureKHR( WriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
WriteDescriptorSetAccelerationStructureKHR( VkWriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: WriteDescriptorSetAccelerationStructureKHR( *reinterpret_cast<WriteDescriptorSetAccelerationStructureKHR const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    WriteDescriptorSetAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures_,
                                                const void * pNext_ = nullptr )
      : pNext( pNext_ )
      , accelerationStructureCount( static_cast<uint32_t>( accelerationStructures_.size() ) )
      , pAccelerationStructures( accelerationStructures_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
WriteDescriptorSetAccelerationStructureKHR & operator=( WriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
WriteDescriptorSetAccelerationStructureKHR & operator=( VkWriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureKHR & setAccelerationStructureCount( uint32_t accelerationStructureCount_ ) VULKAN_HPP_NOEXCEPT
{
accelerationStructureCount = accelerationStructureCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureKHR & setPAccelerationStructures( const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures_ ) VULKAN_HPP_NOEXCEPT
{
pAccelerationStructures = pAccelerationStructures_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
WriteDescriptorSetAccelerationStructureKHR & setAccelerationStructures( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures_ ) VULKAN_HPP_NOEXCEPT
{
accelerationStructureCount = static_cast<uint32_t>( accelerationStructures_.size() );
pAccelerationStructures = accelerationStructures_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkWriteDescriptorSetAccelerationStructureKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkWriteDescriptorSetAccelerationStructureKHR*>( this );
}
operator VkWriteDescriptorSetAccelerationStructureKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkWriteDescriptorSetAccelerationStructureKHR*>( this );
}
operator VkWriteDescriptorSetAccelerationStructureKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkWriteDescriptorSetAccelerationStructureKHR*>( this );
}
operator VkWriteDescriptorSetAccelerationStructureKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkWriteDescriptorSetAccelerationStructureKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, accelerationStructureCount, pAccelerationStructures );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( WriteDescriptorSetAccelerationStructureKHR const & ) const = default;
#else
bool operator==( WriteDescriptorSetAccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( accelerationStructureCount == rhs.accelerationStructureCount )
&& ( pAccelerationStructures == rhs.pAccelerationStructures );
#endif
}
bool operator!=( WriteDescriptorSetAccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSetAccelerationStructureKHR;
const void * pNext = {};
uint32_t accelerationStructureCount = {};
const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures = {};
};
template <>
struct CppType<StructureType, StructureType::eWriteDescriptorSetAccelerationStructureKHR>
{
using Type = WriteDescriptorSetAccelerationStructureKHR;
};
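  // Usage sketch (comments only): this struct extends a descriptor write whose descriptor type is
  // eAccelerationStructureKHR; the parent write's descriptorCount must match
  // accelerationStructureCount. `tlas` and `descriptorSet` are hypothetical, and the array-proxy
  // constructor accepts a single lvalue handle.
  //
  //   VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureKHR asWrite( tlas );  // count = 1
  //   auto write = VULKAN_HPP_NAMESPACE::WriteDescriptorSet{}
  //                  .setDstSet( descriptorSet )
  //                  .setDescriptorCount( 1 )
  //                  .setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType::eAccelerationStructureKHR )
  //                  .setPNext( &asWrite );
  //   device.updateDescriptorSets( write, {} );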
// wrapper struct for struct VkWriteDescriptorSetAccelerationStructureNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkWriteDescriptorSetAccelerationStructureNV.html
struct WriteDescriptorSetAccelerationStructureNV
{
using NativeType = VkWriteDescriptorSetAccelerationStructureNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSetAccelerationStructureNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureNV(uint32_t accelerationStructureCount_ = {}, const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, accelerationStructureCount{ accelerationStructureCount_ }, pAccelerationStructures{ pAccelerationStructures_ }
{}
VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureNV( WriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
WriteDescriptorSetAccelerationStructureNV( VkWriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT
: WriteDescriptorSetAccelerationStructureNV( *reinterpret_cast<WriteDescriptorSetAccelerationStructureNV const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    WriteDescriptorSetAccelerationStructureNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> const & accelerationStructures_,
                                               const void * pNext_ = nullptr )
      : pNext( pNext_ )
      , accelerationStructureCount( static_cast<uint32_t>( accelerationStructures_.size() ) )
      , pAccelerationStructures( accelerationStructures_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
WriteDescriptorSetAccelerationStructureNV & operator=( WriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
WriteDescriptorSetAccelerationStructureNV & operator=( VkWriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureNV & setAccelerationStructureCount( uint32_t accelerationStructureCount_ ) VULKAN_HPP_NOEXCEPT
{
accelerationStructureCount = accelerationStructureCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureNV & setPAccelerationStructures( const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures_ ) VULKAN_HPP_NOEXCEPT
{
pAccelerationStructures = pAccelerationStructures_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
WriteDescriptorSetAccelerationStructureNV & setAccelerationStructures( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> const & accelerationStructures_ ) VULKAN_HPP_NOEXCEPT
{
accelerationStructureCount = static_cast<uint32_t>( accelerationStructures_.size() );
pAccelerationStructures = accelerationStructures_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkWriteDescriptorSetAccelerationStructureNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkWriteDescriptorSetAccelerationStructureNV*>( this );
}
operator VkWriteDescriptorSetAccelerationStructureNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkWriteDescriptorSetAccelerationStructureNV*>( this );
}
operator VkWriteDescriptorSetAccelerationStructureNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkWriteDescriptorSetAccelerationStructureNV*>( this );
}
operator VkWriteDescriptorSetAccelerationStructureNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkWriteDescriptorSetAccelerationStructureNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, accelerationStructureCount, pAccelerationStructures );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( WriteDescriptorSetAccelerationStructureNV const & ) const = default;
#else
bool operator==( WriteDescriptorSetAccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( accelerationStructureCount == rhs.accelerationStructureCount )
&& ( pAccelerationStructures == rhs.pAccelerationStructures );
#endif
}
bool operator!=( WriteDescriptorSetAccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSetAccelerationStructureNV;
const void * pNext = {};
uint32_t accelerationStructureCount = {};
const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures = {};
};
template <>
struct CppType<StructureType, StructureType::eWriteDescriptorSetAccelerationStructureNV>
{
using Type = WriteDescriptorSetAccelerationStructureNV;
};
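  // Usage sketch (illustrative comment only, not part of the generated API): this struct
  // extends VkWriteDescriptorSet via its pNext chain when writing a descriptor of type
  // eAccelerationStructureNV. Assuming the default `vk` namespace alias, a valid
  // vk::Device `device`, vk::DescriptorSet `set`, vk::AccelerationStructureNV `tlas`,
  // and enhanced mode enabled, a write might look like:
  //
  //   vk::WriteDescriptorSetAccelerationStructureNV asWrite;
  //   asWrite.setAccelerationStructures( tlas );  // fills count and pointer together
  //   vk::WriteDescriptorSet write;
  //   write.setDstSet( set )
  //        .setDescriptorType( vk::DescriptorType::eAccelerationStructureNV )
  //        .setDescriptorCount( 1 )               // must equal accelerationStructureCount
  //        .setPNext( &asWrite );
  //   device.updateDescriptorSets( write, nullptr );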
// wrapper struct for struct VkWriteDescriptorSetInlineUniformBlock, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkWriteDescriptorSetInlineUniformBlock.html
struct WriteDescriptorSetInlineUniformBlock
{
using NativeType = VkWriteDescriptorSetInlineUniformBlock;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSetInlineUniformBlock;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR WriteDescriptorSetInlineUniformBlock(uint32_t dataSize_ = {}, const void * pData_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, dataSize{ dataSize_ }, pData{ pData_ }
{}
VULKAN_HPP_CONSTEXPR WriteDescriptorSetInlineUniformBlock( WriteDescriptorSetInlineUniformBlock const & rhs ) VULKAN_HPP_NOEXCEPT = default;
WriteDescriptorSetInlineUniformBlock( VkWriteDescriptorSetInlineUniformBlock const & rhs ) VULKAN_HPP_NOEXCEPT
: WriteDescriptorSetInlineUniformBlock( *reinterpret_cast<WriteDescriptorSetInlineUniformBlock const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
template <typename T>
WriteDescriptorSetInlineUniformBlock( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & data_, const void * pNext_ = nullptr )
: pNext( pNext_ ), dataSize( static_cast<uint32_t>( data_.size() * sizeof(T) ) ), pData( data_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
WriteDescriptorSetInlineUniformBlock & operator=( WriteDescriptorSetInlineUniformBlock const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
WriteDescriptorSetInlineUniformBlock & operator=( VkWriteDescriptorSetInlineUniformBlock const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlock const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetInlineUniformBlock & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetInlineUniformBlock & setDataSize( uint32_t dataSize_ ) VULKAN_HPP_NOEXCEPT
{
dataSize = dataSize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetInlineUniformBlock & setPData( const void * pData_ ) VULKAN_HPP_NOEXCEPT
{
pData = pData_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
template <typename T>
WriteDescriptorSetInlineUniformBlock & setData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & data_ ) VULKAN_HPP_NOEXCEPT
{
dataSize = static_cast<uint32_t>( data_.size() * sizeof(T) );
pData = data_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkWriteDescriptorSetInlineUniformBlock const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkWriteDescriptorSetInlineUniformBlock*>( this );
}
operator VkWriteDescriptorSetInlineUniformBlock &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkWriteDescriptorSetInlineUniformBlock*>( this );
}
operator VkWriteDescriptorSetInlineUniformBlock const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkWriteDescriptorSetInlineUniformBlock*>( this );
}
operator VkWriteDescriptorSetInlineUniformBlock *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkWriteDescriptorSetInlineUniformBlock*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const void * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, dataSize, pData );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( WriteDescriptorSetInlineUniformBlock const & ) const = default;
#else
bool operator==( WriteDescriptorSetInlineUniformBlock const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( dataSize == rhs.dataSize )
&& ( pData == rhs.pData );
#endif
}
bool operator!=( WriteDescriptorSetInlineUniformBlock const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSetInlineUniformBlock;
const void * pNext = {};
uint32_t dataSize = {};
const void * pData = {};
};
template <>
struct CppType<StructureType, StructureType::eWriteDescriptorSetInlineUniformBlock>
{
using Type = WriteDescriptorSetInlineUniformBlock;
};
using WriteDescriptorSetInlineUniformBlockEXT = WriteDescriptorSetInlineUniformBlock;
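  // Usage sketch (illustrative comment only): chained into a VkWriteDescriptorSet for a
  // descriptor of type eInlineUniformBlock; here WriteDescriptorSet::descriptorCount
  // carries the byte size, which the spec requires to be a multiple of 4. Assuming the
  // default `vk` namespace, a valid vk::DescriptorSet `set`, and enhanced mode:
  //
  //   std::array<uint32_t, 4> words = { 0, 1, 2, 3 };
  //   vk::WriteDescriptorSetInlineUniformBlock inlineWrite;
  //   inlineWrite.setData<uint32_t>( words );     // dataSize = 16, pData = words.data()
  //   vk::WriteDescriptorSet write;
  //   write.setDstSet( set )
  //        .setDescriptorType( vk::DescriptorType::eInlineUniformBlock )
  //        .setDescriptorCount( inlineWrite.dataSize )
  //        .setPNext( &inlineWrite );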
// wrapper struct for struct VkWriteDescriptorSetPartitionedAccelerationStructureNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkWriteDescriptorSetPartitionedAccelerationStructureNV.html
struct WriteDescriptorSetPartitionedAccelerationStructureNV
{
using NativeType = VkWriteDescriptorSetPartitionedAccelerationStructureNV;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSetPartitionedAccelerationStructureNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR WriteDescriptorSetPartitionedAccelerationStructureNV(uint32_t accelerationStructureCount_ = {}, const VULKAN_HPP_NAMESPACE::DeviceAddress * pAccelerationStructures_ = {}, void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, accelerationStructureCount{ accelerationStructureCount_ }, pAccelerationStructures{ pAccelerationStructures_ }
{}
VULKAN_HPP_CONSTEXPR WriteDescriptorSetPartitionedAccelerationStructureNV( WriteDescriptorSetPartitionedAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
WriteDescriptorSetPartitionedAccelerationStructureNV( VkWriteDescriptorSetPartitionedAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT
: WriteDescriptorSetPartitionedAccelerationStructureNV( *reinterpret_cast<WriteDescriptorSetPartitionedAccelerationStructureNV const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
WriteDescriptorSetPartitionedAccelerationStructureNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceAddress> const & accelerationStructures_, void * pNext_ = nullptr )
: pNext( pNext_ ), accelerationStructureCount( static_cast<uint32_t>( accelerationStructures_.size() ) ), pAccelerationStructures( accelerationStructures_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
WriteDescriptorSetPartitionedAccelerationStructureNV & operator=( WriteDescriptorSetPartitionedAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
WriteDescriptorSetPartitionedAccelerationStructureNV & operator=( VkWriteDescriptorSetPartitionedAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WriteDescriptorSetPartitionedAccelerationStructureNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetPartitionedAccelerationStructureNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetPartitionedAccelerationStructureNV & setAccelerationStructureCount( uint32_t accelerationStructureCount_ ) VULKAN_HPP_NOEXCEPT
{
accelerationStructureCount = accelerationStructureCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetPartitionedAccelerationStructureNV & setPAccelerationStructures( const VULKAN_HPP_NAMESPACE::DeviceAddress * pAccelerationStructures_ ) VULKAN_HPP_NOEXCEPT
{
pAccelerationStructures = pAccelerationStructures_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
WriteDescriptorSetPartitionedAccelerationStructureNV & setAccelerationStructures( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceAddress> const & accelerationStructures_ ) VULKAN_HPP_NOEXCEPT
{
accelerationStructureCount = static_cast<uint32_t>( accelerationStructures_.size() );
pAccelerationStructures = accelerationStructures_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkWriteDescriptorSetPartitionedAccelerationStructureNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkWriteDescriptorSetPartitionedAccelerationStructureNV*>( this );
}
operator VkWriteDescriptorSetPartitionedAccelerationStructureNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkWriteDescriptorSetPartitionedAccelerationStructureNV*>( this );
}
operator VkWriteDescriptorSetPartitionedAccelerationStructureNV const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkWriteDescriptorSetPartitionedAccelerationStructureNV*>( this );
}
operator VkWriteDescriptorSetPartitionedAccelerationStructureNV *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkWriteDescriptorSetPartitionedAccelerationStructureNV*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::DeviceAddress * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, accelerationStructureCount, pAccelerationStructures );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( WriteDescriptorSetPartitionedAccelerationStructureNV const & ) const = default;
#else
bool operator==( WriteDescriptorSetPartitionedAccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( accelerationStructureCount == rhs.accelerationStructureCount )
&& ( pAccelerationStructures == rhs.pAccelerationStructures );
#endif
}
bool operator!=( WriteDescriptorSetPartitionedAccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSetPartitionedAccelerationStructureNV;
void * pNext = {};
uint32_t accelerationStructureCount = {};
const VULKAN_HPP_NAMESPACE::DeviceAddress * pAccelerationStructures = {};
};
template <>
struct CppType<StructureType, StructureType::eWriteDescriptorSetPartitionedAccelerationStructureNV>
{
using Type = WriteDescriptorSetPartitionedAccelerationStructureNV;
};
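  // Usage sketch (illustrative comment only): with VK_NV_partitioned_acceleration_structure
  // the descriptor is written from a device address rather than a handle. Assuming
  // `ptlasAddress` is a hypothetical vk::DeviceAddress of a built PTLAS and enhanced
  // mode is enabled:
  //
  //   vk::DeviceAddress addr = ptlasAddress;
  //   vk::WriteDescriptorSetPartitionedAccelerationStructureNV pasWrite;
  //   pasWrite.setAccelerationStructures( addr ); // accelerationStructureCount = 1
  //   // then chain &pasWrite into a vk::WriteDescriptorSet whose descriptorType is
  //   // vk::DescriptorType::ePartitionedAccelerationStructureNV, as shown above.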
// wrapper struct for struct VkWriteDescriptorSetTensorARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkWriteDescriptorSetTensorARM.html
struct WriteDescriptorSetTensorARM
{
using NativeType = VkWriteDescriptorSetTensorARM;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSetTensorARM;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR WriteDescriptorSetTensorARM(uint32_t tensorViewCount_ = {}, const VULKAN_HPP_NAMESPACE::TensorViewARM * pTensorViews_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, tensorViewCount{ tensorViewCount_ }, pTensorViews{ pTensorViews_ }
{}
VULKAN_HPP_CONSTEXPR WriteDescriptorSetTensorARM( WriteDescriptorSetTensorARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
WriteDescriptorSetTensorARM( VkWriteDescriptorSetTensorARM const & rhs ) VULKAN_HPP_NOEXCEPT
: WriteDescriptorSetTensorARM( *reinterpret_cast<WriteDescriptorSetTensorARM const *>( &rhs ) )
{}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
WriteDescriptorSetTensorARM( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::TensorViewARM> const & tensorViews_, const void * pNext_ = nullptr )
: pNext( pNext_ ), tensorViewCount( static_cast<uint32_t>( tensorViews_.size() ) ), pTensorViews( tensorViews_.data() )
{}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
WriteDescriptorSetTensorARM & operator=( WriteDescriptorSetTensorARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
WriteDescriptorSetTensorARM & operator=( VkWriteDescriptorSetTensorARM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WriteDescriptorSetTensorARM const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetTensorARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetTensorARM & setTensorViewCount( uint32_t tensorViewCount_ ) VULKAN_HPP_NOEXCEPT
{
tensorViewCount = tensorViewCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetTensorARM & setPTensorViews( const VULKAN_HPP_NAMESPACE::TensorViewARM * pTensorViews_ ) VULKAN_HPP_NOEXCEPT
{
pTensorViews = pTensorViews_;
return *this;
}
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
WriteDescriptorSetTensorARM & setTensorViews( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::TensorViewARM> const & tensorViews_ ) VULKAN_HPP_NOEXCEPT
{
tensorViewCount = static_cast<uint32_t>( tensorViews_.size() );
pTensorViews = tensorViews_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkWriteDescriptorSetTensorARM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkWriteDescriptorSetTensorARM*>( this );
}
operator VkWriteDescriptorSetTensorARM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkWriteDescriptorSetTensorARM*>( this );
}
operator VkWriteDescriptorSetTensorARM const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkWriteDescriptorSetTensorARM*>( this );
}
operator VkWriteDescriptorSetTensorARM *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkWriteDescriptorSetTensorARM*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::TensorViewARM * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, tensorViewCount, pTensorViews );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( WriteDescriptorSetTensorARM const & ) const = default;
#else
bool operator==( WriteDescriptorSetTensorARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( tensorViewCount == rhs.tensorViewCount )
&& ( pTensorViews == rhs.pTensorViews );
#endif
}
bool operator!=( WriteDescriptorSetTensorARM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSetTensorARM;
const void * pNext = {};
uint32_t tensorViewCount = {};
const VULKAN_HPP_NAMESPACE::TensorViewARM * pTensorViews = {};
};
template <>
struct CppType<StructureType, StructureType::eWriteDescriptorSetTensorARM>
{
using Type = WriteDescriptorSetTensorARM;
};
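  // Usage sketch (illustrative comment only): part of VK_ARM_tensors; chained into a
  // VkWriteDescriptorSet when writing a tensor descriptor. Assuming a valid
  // vk::TensorViewARM handle `view` and enhanced mode:
  //
  //   vk::WriteDescriptorSetTensorARM tensorWrite;
  //   tensorWrite.setTensorViews( view );         // tensorViewCount = 1
  //   // then chain &tensorWrite into a vk::WriteDescriptorSet whose descriptorType is
  //   // vk::DescriptorType::eTensorARM.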
// wrapper struct for struct VkWriteIndirectExecutionSetPipelineEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkWriteIndirectExecutionSetPipelineEXT.html
struct WriteIndirectExecutionSetPipelineEXT
{
using NativeType = VkWriteIndirectExecutionSetPipelineEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteIndirectExecutionSetPipelineEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR WriteIndirectExecutionSetPipelineEXT(uint32_t index_ = {}, VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, index{ index_ }, pipeline{ pipeline_ }
{}
VULKAN_HPP_CONSTEXPR WriteIndirectExecutionSetPipelineEXT( WriteIndirectExecutionSetPipelineEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
WriteIndirectExecutionSetPipelineEXT( VkWriteIndirectExecutionSetPipelineEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: WriteIndirectExecutionSetPipelineEXT( *reinterpret_cast<WriteIndirectExecutionSetPipelineEXT const *>( &rhs ) )
{}
WriteIndirectExecutionSetPipelineEXT & operator=( WriteIndirectExecutionSetPipelineEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
WriteIndirectExecutionSetPipelineEXT & operator=( VkWriteIndirectExecutionSetPipelineEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WriteIndirectExecutionSetPipelineEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 WriteIndirectExecutionSetPipelineEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 WriteIndirectExecutionSetPipelineEXT & setIndex( uint32_t index_ ) VULKAN_HPP_NOEXCEPT
{
index = index_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 WriteIndirectExecutionSetPipelineEXT & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
{
pipeline = pipeline_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkWriteIndirectExecutionSetPipelineEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkWriteIndirectExecutionSetPipelineEXT*>( this );
}
operator VkWriteIndirectExecutionSetPipelineEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkWriteIndirectExecutionSetPipelineEXT*>( this );
}
operator VkWriteIndirectExecutionSetPipelineEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkWriteIndirectExecutionSetPipelineEXT*>( this );
}
operator VkWriteIndirectExecutionSetPipelineEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkWriteIndirectExecutionSetPipelineEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Pipeline const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, index, pipeline );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( WriteIndirectExecutionSetPipelineEXT const & ) const = default;
#else
bool operator==( WriteIndirectExecutionSetPipelineEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( index == rhs.index )
&& ( pipeline == rhs.pipeline );
#endif
}
bool operator!=( WriteIndirectExecutionSetPipelineEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteIndirectExecutionSetPipelineEXT;
const void * pNext = {};
uint32_t index = {};
VULKAN_HPP_NAMESPACE::Pipeline pipeline = {};
};
template <>
struct CppType<StructureType, StructureType::eWriteIndirectExecutionSetPipelineEXT>
{
using Type = WriteIndirectExecutionSetPipelineEXT;
};
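  // Usage sketch (illustrative comment only): from VK_EXT_device_generated_commands;
  // each write replaces the pipeline stored at `index` in an indirect execution set.
  // Assuming a valid vk::IndirectExecutionSetEXT `ies` and vk::Pipeline `pipe`, an
  // update might look like:
  //
  //   vk::WriteIndirectExecutionSetPipelineEXT w;
  //   w.setIndex( 0 ).setPipeline( pipe );
  //   device.updateIndirectExecutionSetPipelineEXT( ies, w );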
// wrapper struct for struct VkWriteIndirectExecutionSetShaderEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkWriteIndirectExecutionSetShaderEXT.html
struct WriteIndirectExecutionSetShaderEXT
{
using NativeType = VkWriteIndirectExecutionSetShaderEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteIndirectExecutionSetShaderEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR WriteIndirectExecutionSetShaderEXT(uint32_t index_ = {}, VULKAN_HPP_NAMESPACE::ShaderEXT shader_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, index{ index_ }, shader{ shader_ }
{}
VULKAN_HPP_CONSTEXPR WriteIndirectExecutionSetShaderEXT( WriteIndirectExecutionSetShaderEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
WriteIndirectExecutionSetShaderEXT( VkWriteIndirectExecutionSetShaderEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: WriteIndirectExecutionSetShaderEXT( *reinterpret_cast<WriteIndirectExecutionSetShaderEXT const *>( &rhs ) )
{}
WriteIndirectExecutionSetShaderEXT & operator=( WriteIndirectExecutionSetShaderEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
WriteIndirectExecutionSetShaderEXT & operator=( VkWriteIndirectExecutionSetShaderEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WriteIndirectExecutionSetShaderEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 WriteIndirectExecutionSetShaderEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 WriteIndirectExecutionSetShaderEXT & setIndex( uint32_t index_ ) VULKAN_HPP_NOEXCEPT
{
index = index_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 WriteIndirectExecutionSetShaderEXT & setShader( VULKAN_HPP_NAMESPACE::ShaderEXT shader_ ) VULKAN_HPP_NOEXCEPT
{
shader = shader_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkWriteIndirectExecutionSetShaderEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkWriteIndirectExecutionSetShaderEXT*>( this );
}
operator VkWriteIndirectExecutionSetShaderEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkWriteIndirectExecutionSetShaderEXT*>( this );
}
operator VkWriteIndirectExecutionSetShaderEXT const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkWriteIndirectExecutionSetShaderEXT*>( this );
}
operator VkWriteIndirectExecutionSetShaderEXT *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkWriteIndirectExecutionSetShaderEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ShaderEXT const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, index, shader );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( WriteIndirectExecutionSetShaderEXT const & ) const = default;
#else
bool operator==( WriteIndirectExecutionSetShaderEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( index == rhs.index )
&& ( shader == rhs.shader );
#endif
}
bool operator!=( WriteIndirectExecutionSetShaderEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteIndirectExecutionSetShaderEXT;
const void * pNext = {};
uint32_t index = {};
VULKAN_HPP_NAMESPACE::ShaderEXT shader = {};
};
template <>
struct CppType<StructureType, StructureType::eWriteIndirectExecutionSetShaderEXT>
{
using Type = WriteIndirectExecutionSetShaderEXT;
};
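  // Usage sketch (illustrative comment only): the shader-object counterpart of the
  // pipeline write above, replacing the vk::ShaderEXT stored at `index`. Assuming a
  // valid vk::IndirectExecutionSetEXT `ies` and vk::ShaderEXT `shader`:
  //
  //   vk::WriteIndirectExecutionSetShaderEXT w;
  //   w.setIndex( 0 ).setShader( shader );
  //   device.updateIndirectExecutionSetShaderEXT( ies, w );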
#if defined( VK_USE_PLATFORM_XCB_KHR )
// wrapper struct for struct VkXcbSurfaceCreateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkXcbSurfaceCreateInfoKHR.html
struct XcbSurfaceCreateInfoKHR
{
using NativeType = VkXcbSurfaceCreateInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eXcbSurfaceCreateInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR XcbSurfaceCreateInfoKHR(VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR flags_ = {}, xcb_connection_t * connection_ = {}, xcb_window_t window_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, connection{ connection_ }, window{ window_ }
{}
VULKAN_HPP_CONSTEXPR XcbSurfaceCreateInfoKHR( XcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
XcbSurfaceCreateInfoKHR( VkXcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: XcbSurfaceCreateInfoKHR( *reinterpret_cast<XcbSurfaceCreateInfoKHR const *>( &rhs ) )
{}
XcbSurfaceCreateInfoKHR & operator=( XcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
XcbSurfaceCreateInfoKHR & operator=( VkXcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 XcbSurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 XcbSurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 XcbSurfaceCreateInfoKHR & setConnection( xcb_connection_t * connection_ ) VULKAN_HPP_NOEXCEPT
{
connection = connection_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 XcbSurfaceCreateInfoKHR & setWindow( xcb_window_t window_ ) VULKAN_HPP_NOEXCEPT
{
window = window_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkXcbSurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkXcbSurfaceCreateInfoKHR*>( this );
}
operator VkXcbSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkXcbSurfaceCreateInfoKHR*>( this );
}
operator VkXcbSurfaceCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkXcbSurfaceCreateInfoKHR*>( this );
}
operator VkXcbSurfaceCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkXcbSurfaceCreateInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR const &, xcb_connection_t * const &, xcb_window_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, connection, window );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
std::strong_ordering operator<=>( XcbSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) return cmp;
if ( auto cmp = connection <=> rhs.connection; cmp != 0 ) return cmp;
if ( auto cmp = memcmp( &window, &rhs.window, sizeof( xcb_window_t ) ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
return std::strong_ordering::equivalent;
}
#endif
bool operator==( XcbSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( connection == rhs.connection )
&& ( memcmp( &window, &rhs.window, sizeof( xcb_window_t ) ) == 0 );
}
bool operator!=( XcbSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eXcbSurfaceCreateInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR flags = {};
xcb_connection_t * connection = {};
xcb_window_t window = {};
};
template <>
struct CppType<StructureType, StructureType::eXcbSurfaceCreateInfoKHR>
{
using Type = XcbSurfaceCreateInfoKHR;
};
#endif /*VK_USE_PLATFORM_XCB_KHR*/
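  // Usage sketch (illustrative comment only, requires VK_USE_PLATFORM_XCB_KHR): the
  // create info above feeds vkCreateXcbSurfaceKHR. Assuming a live xcb_connection_t*
  // `conn`, an xcb_window_t `win`, and a valid vk::Instance `instance`:
  //
  //   vk::XcbSurfaceCreateInfoKHR createInfo;
  //   createInfo.setConnection( conn ).setWindow( win );
  //   vk::SurfaceKHR surface = instance.createXcbSurfaceKHR( createInfo );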
#if defined( VK_USE_PLATFORM_XLIB_KHR )
// wrapper struct for struct VkXlibSurfaceCreateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkXlibSurfaceCreateInfoKHR.html
struct XlibSurfaceCreateInfoKHR
{
using NativeType = VkXlibSurfaceCreateInfoKHR;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eXlibSurfaceCreateInfoKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR XlibSurfaceCreateInfoKHR(VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR flags_ = {}, Display * dpy_ = {}, Window window_ = {}, const void * pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
: pNext{ pNext_ }, flags{ flags_ }, dpy{ dpy_ }, window{ window_ }
{}
VULKAN_HPP_CONSTEXPR XlibSurfaceCreateInfoKHR( XlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
XlibSurfaceCreateInfoKHR( VkXlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: XlibSurfaceCreateInfoKHR( *reinterpret_cast<XlibSurfaceCreateInfoKHR const *>( &rhs ) )
{}
XlibSurfaceCreateInfoKHR & operator=( XlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
XlibSurfaceCreateInfoKHR & operator=( VkXlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 XlibSurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 XlibSurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 XlibSurfaceCreateInfoKHR & setDpy( Display * dpy_ ) VULKAN_HPP_NOEXCEPT
{
dpy = dpy_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 XlibSurfaceCreateInfoKHR & setWindow( Window window_ ) VULKAN_HPP_NOEXCEPT
{
window = window_;
return *this;
}
#endif /*VULKAN_HPP_NO_SETTERS*/
operator VkXlibSurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkXlibSurfaceCreateInfoKHR*>( this );
}
operator VkXlibSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkXlibSurfaceCreateInfoKHR*>( this );
}
operator VkXlibSurfaceCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<const VkXlibSurfaceCreateInfoKHR*>( this );
}
operator VkXlibSurfaceCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
{
return reinterpret_cast<VkXlibSurfaceCreateInfoKHR*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR const &, Display * const &, Window const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, dpy, window );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
std::strong_ordering operator<=>( XlibSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) return cmp;
if ( auto cmp = dpy <=> rhs.dpy; cmp != 0 ) return cmp;
if ( auto cmp = memcmp( &window, &rhs.window, sizeof( Window ) ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
return std::strong_ordering::equivalent;
}
#endif
bool operator==( XlibSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( dpy == rhs.dpy )
&& ( memcmp( &window, &rhs.window, sizeof( Window ) ) == 0 );
}
bool operator!=( XlibSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eXlibSurfaceCreateInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR flags = {};
Display * dpy = {};
Window window = {};
};
template <>
struct CppType<StructureType, StructureType::eXlibSurfaceCreateInfoKHR>
{
using Type = XlibSurfaceCreateInfoKHR;
};
#endif /*VK_USE_PLATFORM_XLIB_KHR*/
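  // Usage sketch (illustrative comment only, requires VK_USE_PLATFORM_XLIB_KHR):
  // analogous to the XCB path above, feeding vkCreateXlibSurfaceKHR. Assuming a live
  // Display* `dpy`, a Window `win`, and a valid vk::Instance `instance`:
  //
  //   vk::XlibSurfaceCreateInfoKHR createInfo;
  //   createInfo.setDpy( dpy ).setWindow( win );
  //   vk::SurfaceKHR surface = instance.createXlibSurfaceKHR( createInfo );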
} // namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_STRUCTS_HPP*/